diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml new file mode 100644 index 000000000..88936c584 --- /dev/null +++ b/.github/.OwlBot.lock.yaml @@ -0,0 +1,17 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml new file mode 100644 index 000000000..898efd861 --- /dev/null +++ b/.github/.OwlBot.yaml @@ -0,0 +1,25 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +docker: + image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest + +deep-remove-regex: + - /owl-bot-staging + +deep-copy-regex: + - source: /google/cloud/compute/(v.*)/compute-v.*-py/(.*) + dest: /owl-bot-staging/$1/$2 + +begin-after-commit-hash: 70f7f0525414fe4dfeb2fc2e81546b073f83a621 diff --git a/docs/index.rst b/docs/index.rst index ace04927d..dbd139eca 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,5 +1,8 @@ .. include:: README.rst +.. include:: multiprocessing.rst + + API Reference ------------- .. toctree:: @@ -15,6 +18,6 @@ Changelog For a list of all ``google-cloud-compute`` releases: .. toctree:: - :maxdepth: 2 + :maxdepth: 2 - changelog \ No newline at end of file + changelog diff --git a/google/cloud/compute_v1/services/accelerator_types/client.py b/google/cloud/compute_v1/services/accelerator_types/client.py index fd4e6ecd5..9475b87c6 100644 --- a/google/cloud/compute_v1/services/accelerator_types/client.py +++ b/google/cloud/compute_v1/services/accelerator_types/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.accelerator_types import pagers from google.cloud.compute_v1.types import compute from .transports.base import AcceleratorTypesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,21 +339,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListAcceleratorTypesRequest = None, + request: Union[compute.AggregatedListAcceleratorTypesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of accelerator types. Args: - request (google.cloud.compute_v1.types.AggregatedListAcceleratorTypesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListAcceleratorTypesRequest, dict]): The request object. A request message for AcceleratorTypes.AggregatedList. See the method description for details. @@ -403,19 +415,19 @@ def aggregated_list( def get( self, - request: compute.GetAcceleratorTypeRequest = None, + request: Union[compute.GetAcceleratorTypeRequest, dict] = None, *, project: str = None, zone: str = None, accelerator_type: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AcceleratorType: r"""Returns the specified accelerator type. 
Args: - request (google.cloud.compute_v1.types.GetAcceleratorTypeRequest): + request (Union[google.cloud.compute_v1.types.GetAcceleratorTypeRequest, dict]): The request object. A request message for AcceleratorTypes.Get. See the method description for details. @@ -493,11 +505,11 @@ def get( def list( self, - request: compute.ListAcceleratorTypesRequest = None, + request: Union[compute.ListAcceleratorTypesRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -505,7 +517,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListAcceleratorTypesRequest): + request (Union[google.cloud.compute_v1.types.ListAcceleratorTypesRequest, dict]): The request object. A request message for AcceleratorTypes.List. See the method description for details. @@ -574,6 +586,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/accelerator_types/pagers.py b/google/cloud/compute_v1/services/accelerator_types/pagers.py index d2fc460ae..00fa93ddf 100644 --- a/google/cloud/compute_v1/services/accelerator_types/pagers.py +++ b/google/cloud/compute_v1/services/accelerator_types/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.AcceleratorTypeAggregatedList]: + def pages(self) -> Iterator[compute.AcceleratorTypeAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.AcceleratorTypesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.AcceleratorTypesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.AcceleratorTypeList]: + def pages(self) -> Iterator[compute.AcceleratorTypeList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.AcceleratorType]: + def __iter__(self) -> Iterator[compute.AcceleratorType]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/accelerator_types/transports/base.py b/google/cloud/compute_v1/services/accelerator_types/transports/base.py index 08226272e..3a827cb71 100644 --- a/google/cloud/compute_v1/services/accelerator_types/transports/base.py +++ 
b/google/cloud/compute_v1/services/accelerator_types/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class AcceleratorTypesTransport(abc.ABC): """Abstract transport class for AcceleratorTypes.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -170,6 +134,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/accelerator_types/transports/rest.py b/google/cloud/compute_v1/services/accelerator_types/transports/rest.py index 55cad881c..7fa3ed7de 100644 --- a/google/cloud/compute_v1/services/accelerator_types/transports/rest.py +++ b/google/cloud/compute_v1/services/accelerator_types/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + AcceleratorTypesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import AcceleratorTypesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class AcceleratorTypesRestTransport(AcceleratorTypesTransport): @@ -54,6 +73,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -81,6 +101,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -99,10 +124,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListAcceleratorTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AcceleratorTypeAggregatedList: r"""Call the aggregated list method over HTTP. @@ -113,6 +140,9 @@ def aggregated_list( AcceleratorTypes.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -121,35 +151,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/acceleratorTypes".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/acceleratorTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListAcceleratorTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListAcceleratorTypesRequest.to_json( + compute.AggregatedListAcceleratorTypesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListAcceleratorTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListAcceleratorTypesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListAcceleratorTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListAcceleratorTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListAcceleratorTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListAcceleratorTypesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -161,10 +210,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetAcceleratorTypeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AcceleratorType: r"""Call the get method over HTTP. @@ -175,6 +226,9 @@ def get( AcceleratorTypes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -191,23 +245,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes/{accelerator_type}".format( - host=self._host, - project=request.project, - zone=request.zone, - accelerator_type=request.accelerator_type, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes/{accelerator_type}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("accelerator_type", "acceleratorType"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetAcceleratorTypeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetAcceleratorTypeRequest.to_json( + compute.GetAcceleratorTypeRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -219,10 +304,12 @@ def get( response.content, ignore_unknown_fields=True ) - def list( + def _list( self, request: compute.ListAcceleratorTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AcceleratorTypeList: r"""Call the list method over HTTP. @@ -233,6 +320,9 @@ def list( AcceleratorTypes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -241,30 +331,53 @@ def list( Contains a list of accelerator types. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListAcceleratorTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListAcceleratorTypesRequest.to_json( + compute.ListAcceleratorTypesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListAcceleratorTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListAcceleratorTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListAcceleratorTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListAcceleratorTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListAcceleratorTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -276,5 +389,29 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListAcceleratorTypesRequest], + compute.AcceleratorTypeAggregatedList, + ]: + return self._aggregated_list + + @property + def get( + self, + ) -> Callable[[compute.GetAcceleratorTypeRequest], compute.AcceleratorType]: + return self._get + + @property + def list( + self, + ) -> Callable[[compute.ListAcceleratorTypesRequest], compute.AcceleratorTypeList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("AcceleratorTypesRestTransport",) diff --git a/google/cloud/compute_v1/services/addresses/client.py b/google/cloud/compute_v1/services/addresses/client.py index 90cb18e50..bf1b7290e 100644 --- a/google/cloud/compute_v1/services/addresses/client.py +++ b/google/cloud/compute_v1/services/addresses/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.addresses import pagers from google.cloud.compute_v1.types import compute from .transports.base import AddressesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListAddressesRequest = None, + request: Union[compute.AggregatedListAddressesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of addresses. Args: - request (google.cloud.compute_v1.types.AggregatedListAddressesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListAddressesRequest, dict]): The request object. A request message for Addresses.AggregatedList. See the method description for details. @@ -399,19 +411,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteAddressRequest = None, + request: Union[compute.DeleteAddressRequest, dict] = None, *, project: str = None, region: str = None, address: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified address resource. Args: - request (google.cloud.compute_v1.types.DeleteAddressRequest): + request (Union[google.cloud.compute_v1.types.DeleteAddressRequest, dict]): The request object. A request message for Addresses.Delete. See the method description for details. @@ -494,19 +506,19 @@ def delete( def get( self, - request: compute.GetAddressRequest = None, + request: Union[compute.GetAddressRequest, dict] = None, *, project: str = None, region: str = None, address: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Address: r"""Returns the specified address resource. Args: - request (google.cloud.compute_v1.types.GetAddressRequest): + request (Union[google.cloud.compute_v1.types.GetAddressRequest, dict]): The request object. A request message for Addresses.Get. See the method description for details. project (str): @@ -580,12 +592,12 @@ def get( def insert( self, - request: compute.InsertAddressRequest = None, + request: Union[compute.InsertAddressRequest, dict] = None, *, project: str = None, region: str = None, address_resource: compute.Address = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -593,7 +605,7 @@ def insert( by using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertAddressRequest): + request (Union[google.cloud.compute_v1.types.InsertAddressRequest, dict]): The request object. A request message for Addresses.Insert. See the method description for details. 
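`GOOGLE_API_USE_CLIENT_CERTIFICATE` is now validated explicitly instead of being parsed with `distutils.util.strtobool`, so values such as `"1"` or `"yes"` that strtobool used to accept now fail fast. A small sketch of the new behaviour (the value set below is deliberately invalid):

```python
import os
from google.cloud import compute_v1

# Illustrative only: anything other than "true" or "false" raises ValueError
# during client construction, before any credentials are loaded.
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "yes"
try:
    compute_v1.AddressesClient()
except ValueError as exc:
    print(exc)
finally:
    os.environ.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None)
```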
@@ -674,11 +686,11 @@ def insert( def list( self, - request: compute.ListAddressesRequest = None, + request: Union[compute.ListAddressesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -686,7 +698,7 @@ def list( specified region. Args: - request (google.cloud.compute_v1.types.ListAddressesRequest): + request (Union[google.cloud.compute_v1.types.ListAddressesRequest, dict]): The request object. A request message for Addresses.List. See the method description for details. project (str): @@ -752,6 +764,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/addresses/pagers.py b/google/cloud/compute_v1/services/addresses/pagers.py index 1d877028a..36d2e294d 100644 --- a/google/cloud/compute_v1/services/addresses/pagers.py +++ b/google/cloud/compute_v1/services/addresses/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.AddressAggregatedList]: + def pages(self) -> Iterator[compute.AddressAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.AddressesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.AddressesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.AddressList]: + def pages(self) -> Iterator[compute.AddressList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Address]: + def __iter__(self) -> Iterator[compute.Address]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/addresses/transports/base.py b/google/cloud/compute_v1/services/addresses/transports/base.py index 5cdfc8801..02e580d7a 100644 --- a/google/cloud/compute_v1/services/addresses/transports/base.py +++ b/google/cloud/compute_v1/services/addresses/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from 
google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class AddressesTransport(abc.ABC): """Abstract transport class for Addresses.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/addresses/transports/rest.py b/google/cloud/compute_v1/services/addresses/transports/rest.py index c6f087254..873398f9a 100644 --- a/google/cloud/compute_v1/services/addresses/transports/rest.py +++ b/google/cloud/compute_v1/services/addresses/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import AddressesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import AddressesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class AddressesRestTransport(AddressesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListAddressesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AddressAggregatedList: r"""Call the aggregated list method over HTTP. 
@@ -112,6 +136,9 @@ def aggregated_list( Addresses.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/addresses".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/addresses", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListAddressesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListAddressesRequest.to_json( + compute.AggregatedListAddressesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListAddressesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListAddressesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListAddressesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListAddressesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListAddressesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListAddressesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteAddressRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. 
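URL construction in the REST methods is now delegated to `google.api_core.path_template.transcode` rather than hand-formatted strings. A rough illustration of what the transcoding step returns, using placeholder field values:

```python
from google.api_core import path_template

# Illustrative only: transcode() matches an HTTP rule, expands the URI template
# from the request fields, and leaves the remaining fields as query parameters.
http_options = [
    {
        "method": "get",
        "uri": "/compute/v1/projects/{project}/regions/{region}/addresses",
    },
]
transcoded = path_template.transcode(
    http_options, project="my-project", region="us-central1", max_results=5
)
print(transcoded["method"])        # get
print(transcoded["uri"])           # /compute/v1/projects/my-project/regions/us-central1/addresses
print(transcoded["query_params"])  # {'max_results': 5}
```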
@@ -171,6 +222,9 @@ def delete( Addresses.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -194,25 +248,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/addresses/{address}".format( - host=self._host, - project=request.project, - region=request.region, - address=request.address, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/addresses/{address}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("address", "address"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteAddressRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteAddressRequest.to_json( + compute.DeleteAddressRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteAddressRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +305,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetAddressRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Address: r"""Call the get method over HTTP. @@ -235,6 +320,9 @@ def get( The request object. A request message for Addresses.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -250,23 +338,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/addresses/{address}".format( - host=self._host, - project=request.project, - region=request.region, - address=request.address, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/addresses/{address}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("address", "address"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetAddressRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetAddressRequest.to_json( + compute.GetAddressRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -276,10 +395,12 @@ def get( # Return the response return compute.Address.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertAddressRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -290,6 +411,9 @@ def insert( Addresses.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
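The `required_fields` back-fill above compensates for `to_json(..., including_default_value_fields=False)` dropping fields that still hold their default value, which can include required path or query fields. A small illustration with made-up values:

```python
import json
from google.cloud.compute_v1.types import compute

# Illustrative only: "region" and "address" are left at their default (empty)
# values, so to_json omits them even though the URL template requires them.
request = compute.GetAddressRequest(project="my-project")
as_json = compute.GetAddressRequest.to_json(
    request,
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
print(json.loads(as_json))  # roughly {'project': 'my-project'}
```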
@@ -313,30 +437,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/addresses", + "body": "address_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertAddressRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Address.to_json( - request.address_resource, + compute.Address(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/addresses".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertAddressRequest.to_json( + compute.InsertAddressRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertAddressRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -347,10 +501,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListAddressesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AddressList: r"""Call the list method over HTTP. @@ -360,6 +516,9 @@ def list( The request object. A request message for Addresses.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -368,30 +527,53 @@ def list( Contains a list of addresses. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/addresses".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/addresses", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListAddressesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListAddressesRequest.to_json( + compute.ListAddressesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListAddressesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListAddressesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListAddressesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListAddressesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListAddressesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -403,5 +585,32 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListAddressesRequest], compute.AddressAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteAddressRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetAddressRequest], compute.Address]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertAddressRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListAddressesRequest], compute.AddressList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("AddressesRestTransport",) diff --git a/google/cloud/compute_v1/services/autoscalers/client.py b/google/cloud/compute_v1/services/autoscalers/client.py index 704af68ae..e3a3d5565 100644 --- a/google/cloud/compute_v1/services/autoscalers/client.py +++ b/google/cloud/compute_v1/services/autoscalers/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.autoscalers import pagers from google.cloud.compute_v1.types import compute from .transports.base import AutoscalersTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListAutoscalersRequest = None, + request: Union[compute.AggregatedListAutoscalersRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of autoscalers. Args: - request (google.cloud.compute_v1.types.AggregatedListAutoscalersRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListAutoscalersRequest, dict]): The request object. A request message for Autoscalers.AggregatedList. See the method description for details. @@ -399,19 +411,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteAutoscalerRequest = None, + request: Union[compute.DeleteAutoscalerRequest, dict] = None, *, project: str = None, zone: str = None, autoscaler: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified autoscaler. Args: - request (google.cloud.compute_v1.types.DeleteAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.DeleteAutoscalerRequest, dict]): The request object. A request message for Autoscalers.Delete. See the method description for details. @@ -492,12 +504,12 @@ def delete( def get( self, - request: compute.GetAutoscalerRequest = None, + request: Union[compute.GetAutoscalerRequest, dict] = None, *, project: str = None, zone: str = None, autoscaler: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Autoscaler: @@ -506,7 +518,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.GetAutoscalerRequest, dict]): The request object. A request message for Autoscalers.Get. See the method description for details. project (str): @@ -582,12 +594,12 @@ def get( def insert( self, - request: compute.InsertAutoscalerRequest = None, + request: Union[compute.InsertAutoscalerRequest, dict] = None, *, project: str = None, zone: str = None, autoscaler_resource: compute.Autoscaler = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -595,7 +607,7 @@ def insert( the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.InsertAutoscalerRequest, dict]): The request object. A request message for Autoscalers.Insert. See the method description for details. 
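The strtobool replacement above also narrows the accepted values: distutils tolerated variants such as "1", "yes", or "on", while the new check only allows the literal strings "true" and "false" and raises otherwise. A standalone sketch of the same logic (the helper name is ours, not part of the library):

import os

def _use_client_cert() -> bool:
    # Mirrors the generated clients: anything other than "true"/"false" is an error.
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
            "must be either `true` or `false`"
        )
    return value == "true"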
@@ -676,11 +688,11 @@ def insert( def list( self, - request: compute.ListAutoscalersRequest = None, + request: Union[compute.ListAutoscalersRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -688,7 +700,7 @@ def list( specified zone. Args: - request (google.cloud.compute_v1.types.ListAutoscalersRequest): + request (Union[google.cloud.compute_v1.types.ListAutoscalersRequest, dict]): The request object. A request message for Autoscalers.List. See the method description for details. @@ -758,12 +770,12 @@ def list( def patch( self, - request: compute.PatchAutoscalerRequest = None, + request: Union[compute.PatchAutoscalerRequest, dict] = None, *, project: str = None, zone: str = None, autoscaler_resource: compute.Autoscaler = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -773,7 +785,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.PatchAutoscalerRequest, dict]): The request object. A request message for Autoscalers.Patch. See the method description for details. @@ -854,12 +866,12 @@ def patch( def update( self, - request: compute.UpdateAutoscalerRequest = None, + request: Union[compute.UpdateAutoscalerRequest, dict] = None, *, project: str = None, zone: str = None, autoscaler_resource: compute.Autoscaler = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -867,7 +879,7 @@ def update( the data included in the request. Args: - request (google.cloud.compute_v1.types.UpdateAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.UpdateAutoscalerRequest, dict]): The request object. A request message for Autoscalers.Update. See the method description for details. @@ -946,6 +958,19 @@ def update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/autoscalers/pagers.py b/google/cloud/compute_v1/services/autoscalers/pagers.py index 17c841915..17a875c02 100644 --- a/google/cloud/compute_v1/services/autoscalers/pagers.py +++ b/google/cloud/compute_v1/services/autoscalers/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.AutoscalerAggregatedList]: + def pages(self) -> Iterator[compute.AutoscalerAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.AutoscalersScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.AutoscalersScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.AutoscalerList]: + def pages(self) -> Iterator[compute.AutoscalerList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Autoscaler]: + def __iter__(self) -> Iterator[compute.Autoscaler]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/autoscalers/transports/base.py b/google/cloud/compute_v1/services/autoscalers/transports/base.py index d9fed1df4..f214fee22 100644 --- a/google/cloud/compute_v1/services/autoscalers/transports/base.py +++ b/google/cloud/compute_v1/services/autoscalers/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - 
class AutoscalersTransport(abc.ABC): """Abstract transport class for Autoscalers.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -181,6 +145,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/autoscalers/transports/rest.py b/google/cloud/compute_v1/services/autoscalers/transports/rest.py index 44eb6bf83..b5c8ccd71 100644 --- a/google/cloud/compute_v1/services/autoscalers/transports/rest.py +++ b/google/cloud/compute_v1/services/autoscalers/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import AutoscalersTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import AutoscalersTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class AutoscalersRestTransport(AutoscalersTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListAutoscalersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AutoscalerAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( Autoscalers.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
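With this reshuffle, the REST transport builds its own client info from the base transport's gapic version, marking gRPC as absent and reporting the installed requests version instead. Roughly (the version string is illustrative, and the exact user-agent format may differ):

from google.api_core import gapic_v1
from requests import __version__ as requests_version

client_info = gapic_v1.client_info.ClientInfo(
    gapic_version="1.0.0",          # normally taken from the base transport's client info
    grpc_version=None,
    rest_version=requests_version,
)
# The resulting user agent advertises the gapic and rest versions,
# e.g. something along the lines of "... gapic/1.0.0 rest/2.26.0".
print(client_info.to_user_agent())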
@@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/autoscalers".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/autoscalers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListAutoscalersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListAutoscalersRequest.to_json( + compute.AggregatedListAutoscalersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListAutoscalersRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListAutoscalersRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListAutoscalersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListAutoscalersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListAutoscalersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListAutoscalersRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -171,6 +222,9 @@ def delete( Autoscalers.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
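The query-parameter handling above leans on a round trip through the proto-plus message: the transcoded query-param dict is loaded back into the request type and re-serialized to JSON, which yields camelCase keys. A small sketch with made-up values:

import json
from google.cloud.compute_v1.types import compute

fields = {"max_results": 25, "return_partial_success": True}
query_params = json.loads(
    compute.AggregatedListAutoscalersRequest.to_json(
        compute.AggregatedListAutoscalersRequest(fields),
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
)
# query_params -> {"maxResults": 25, "returnPartialSuccess": True}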
@@ -194,25 +248,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}".format( - host=self._host, - project=request.project, - zone=request.zone, - autoscaler=request.autoscaler, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("autoscaler", "autoscaler"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteAutoscalerRequest.to_json( + compute.DeleteAutoscalerRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +305,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Autoscaler: r"""Call the get method over HTTP. @@ -236,6 +321,9 @@ def get( Autoscalers.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
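The required_fields loop repeated in each of these methods guards against a corner case: with including_default_value_fields=False, a required field sitting at its proto3 default is dropped from the JSON and has to be copied back from the transcoded request. Illustrative only (the field chosen here is not one of the generated required_fields entries):

# Suppose serialization dropped a field that was left at its default value.
required_fields = [("return_partial_success", "returnPartialSuccess")]

query_params = {"maxResults": 50}                      # after to_json / json.loads
orig_query_params = {"return_partial_success": False}  # from path_template.transcode

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"maxResults": 50, "returnPartialSuccess": False}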
@@ -256,23 +344,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}".format( - host=self._host, - project=request.project, - zone=request.zone, - autoscaler=request.autoscaler, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("autoscaler", "autoscaler"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetAutoscalerRequest.to_json( + compute.GetAutoscalerRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -284,10 +403,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -298,6 +419,9 @@ def insert( Autoscalers.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
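Sending the request is now driven by the transcoded HTTP verb, so one code path covers GET, POST, PATCH, PUT, and DELETE. A stripped-down sketch of the dispatch (a plain requests.Session stands in for the AuthorizedSession the transport actually uses, and the host and parameters are made up):

import requests
from google.api_core import rest_helpers

session = requests.Session()
method = "get"          # taken from transcoded_request["method"] in the real code
uri = "/compute/v1/projects/my-project/zones/us-central1-a/autoscalers"
query_params = {"maxResults": 10}

# Unauthenticated, so a real call would be rejected; shown only for the dispatch shape.
response = getattr(session, method)(
    "https://{host}{uri}".format(host="compute.googleapis.com", uri=uri),
    params=rest_helpers.flatten_query_params(query_params),
)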
@@ -321,30 +445,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/autoscalers", + "body": "autoscaler_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Autoscaler.to_json( - request.autoscaler_resource, + compute.Autoscaler(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/autoscalers".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertAutoscalerRequest.to_json( + compute.InsertAutoscalerRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -355,10 +509,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListAutoscalersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AutoscalerList: r"""Call the list method over HTTP. @@ -369,6 +525,9 @@ def list( Autoscalers.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -379,30 +538,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/autoscalers".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/autoscalers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListAutoscalersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListAutoscalersRequest.to_json( + compute.ListAutoscalersRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListAutoscalersRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListAutoscalersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListAutoscalersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListAutoscalersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListAutoscalersRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -414,10 +596,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -428,6 +612,9 @@ def patch( Autoscalers.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -451,32 +638,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/autoscalers", + "body": "autoscaler_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.PatchAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Autoscaler.to_json( - request.autoscaler_resource, + compute.Autoscaler(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/autoscalers".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchAutoscalerRequest.to_json( + compute.PatchAutoscalerRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchAutoscalerRequest.autoscaler in request: - query_params["autoscaler"] = request.autoscaler - if compute.PatchAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -487,10 +702,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -501,6 +718,9 @@ def update( Autoscalers.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -524,32 +744,60 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/zones/{zone}/autoscalers", + "body": "autoscaler_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.UpdateAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Autoscaler.to_json( - request.autoscaler_resource, + compute.Autoscaler(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/autoscalers".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateAutoscalerRequest.to_json( + compute.UpdateAutoscalerRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateAutoscalerRequest.autoscaler in request: - query_params["autoscaler"] = request.autoscaler - if compute.UpdateAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -560,5 +808,42 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListAutoscalersRequest], compute.AutoscalerAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteAutoscalerRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetAutoscalerRequest], compute.Autoscaler]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertAutoscalerRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListAutoscalersRequest], compute.AutoscalerList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchAutoscalerRequest], compute.Operation]: + return self._patch + + @property + def update(self) -> 
Callable[[compute.UpdateAutoscalerRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("AutoscalersRestTransport",) diff --git a/google/cloud/compute_v1/services/backend_buckets/client.py b/google/cloud/compute_v1/services/backend_buckets/client.py index 7c8cda7ae..6517df7e9 100644 --- a/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/google/cloud/compute_v1/services/backend_buckets/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.backend_buckets import pagers from google.cloud.compute_v1.types import compute from .transports.base import BackendBucketsTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,16 +337,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_signed_url_key( self, - request: compute.AddSignedUrlKeyBackendBucketRequest = None, + request: Union[compute.AddSignedUrlKeyBackendBucketRequest, dict] = None, *, project: str = None, backend_bucket: str = None, signed_url_key_resource: compute.SignedUrlKey = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -343,7 +355,7 @@ def add_signed_url_key( for this backend bucket. Args: - request (google.cloud.compute_v1.types.AddSignedUrlKeyBackendBucketRequest): + request (Union[google.cloud.compute_v1.types.AddSignedUrlKeyBackendBucketRequest, dict]): The request object. A request message for BackendBuckets.AddSignedUrlKey. See the method description for details. 
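Alongside the transport changes, every public method's request parameter is widened to Union[&lt;Request&gt;, dict], so callers can now pass a plain mapping and have it coerced into the request message. A hedged usage sketch (assumes application-default credentials; the project and bucket names are placeholders):

from google.cloud import compute_v1

client = compute_v1.BackendBucketsClient()
backend_bucket = client.get(
    request={"project": "my-project", "backend_bucket": "my-backend-bucket"}
)
print(backend_bucket.name)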
@@ -428,18 +440,18 @@ def add_signed_url_key( def delete( self, - request: compute.DeleteBackendBucketRequest = None, + request: Union[compute.DeleteBackendBucketRequest, dict] = None, *, project: str = None, backend_bucket: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified BackendBucket resource. Args: - request (google.cloud.compute_v1.types.DeleteBackendBucketRequest): + request (Union[google.cloud.compute_v1.types.DeleteBackendBucketRequest, dict]): The request object. A request message for BackendBuckets.Delete. See the method description for details. @@ -515,12 +527,12 @@ def delete( def delete_signed_url_key( self, - request: compute.DeleteSignedUrlKeyBackendBucketRequest = None, + request: Union[compute.DeleteSignedUrlKeyBackendBucketRequest, dict] = None, *, project: str = None, backend_bucket: str = None, key_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -528,7 +540,7 @@ def delete_signed_url_key( URLs for this backend bucket. Args: - request (google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendBucketRequest): + request (Union[google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendBucketRequest, dict]): The request object. A request message for BackendBuckets.DeleteSignedUrlKey. See the method description for details. @@ -615,11 +627,11 @@ def delete_signed_url_key( def get( self, - request: compute.GetBackendBucketRequest = None, + request: Union[compute.GetBackendBucketRequest, dict] = None, *, project: str = None, backend_bucket: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendBucket: @@ -628,7 +640,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetBackendBucketRequest): + request (Union[google.cloud.compute_v1.types.GetBackendBucketRequest, dict]): The request object. A request message for BackendBuckets.Get. See the method description for details. @@ -694,11 +706,11 @@ def get( def insert( self, - request: compute.InsertBackendBucketRequest = None, + request: Union[compute.InsertBackendBucketRequest, dict] = None, *, project: str = None, backend_bucket_resource: compute.BackendBucket = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -706,7 +718,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertBackendBucketRequest): + request (Union[google.cloud.compute_v1.types.InsertBackendBucketRequest, dict]): The request object. A request message for BackendBuckets.Insert. See the method description for details. @@ -780,10 +792,10 @@ def insert( def list( self, - request: compute.ListBackendBucketsRequest = None, + request: Union[compute.ListBackendBucketsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -791,7 +803,7 @@ def list( available to the specified project. 
Args: - request (google.cloud.compute_v1.types.ListBackendBucketsRequest): + request (Union[google.cloud.compute_v1.types.ListBackendBucketsRequest, dict]): The request object. A request message for BackendBuckets.List. See the method description for details. @@ -854,12 +866,12 @@ def list( def patch( self, - request: compute.PatchBackendBucketRequest = None, + request: Union[compute.PatchBackendBucketRequest, dict] = None, *, project: str = None, backend_bucket: str = None, backend_bucket_resource: compute.BackendBucket = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -869,7 +881,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchBackendBucketRequest): + request (Union[google.cloud.compute_v1.types.PatchBackendBucketRequest, dict]): The request object. A request message for BackendBuckets.Patch. See the method description for details. @@ -952,12 +964,12 @@ def patch( def update( self, - request: compute.UpdateBackendBucketRequest = None, + request: Union[compute.UpdateBackendBucketRequest, dict] = None, *, project: str = None, backend_bucket: str = None, backend_bucket_resource: compute.BackendBucket = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -965,7 +977,7 @@ def update( data included in the request. Args: - request (google.cloud.compute_v1.types.UpdateBackendBucketRequest): + request (Union[google.cloud.compute_v1.types.UpdateBackendBucketRequest, dict]): The request object. A request message for BackendBuckets.Update. See the method description for details. @@ -1046,6 +1058,19 @@ def update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/backend_buckets/pagers.py b/google/cloud/compute_v1/services/backend_buckets/pagers.py index fccc5dad5..0e481a562 100644 --- a/google/cloud/compute_v1/services/backend_buckets/pagers.py +++ b/google/cloud/compute_v1/services/backend_buckets/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.BackendBucketList]: + def pages(self) -> Iterator[compute.BackendBucketList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.BackendBucket]: + def __iter__(self) -> Iterator[compute.BackendBucket]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/backend_buckets/transports/base.py b/google/cloud/compute_v1/services/backend_buckets/transports/base.py index 9ea956822..cce4d7bc2 100644 --- a/google/cloud/compute_v1/services/backend_buckets/transports/base.py +++ b/google/cloud/compute_v1/services/backend_buckets/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class BackendBucketsTransport(abc.ABC): """Abstract transport class for BackendBuckets.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -186,6 +150,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_signed_url_key( self, diff --git a/google/cloud/compute_v1/services/backend_buckets/transports/rest.py b/google/cloud/compute_v1/services/backend_buckets/transports/rest.py index 2dc0343ca..e60f927d8 100644 --- a/google/cloud/compute_v1/services/backend_buckets/transports/rest.py +++ b/google/cloud/compute_v1/services/backend_buckets/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + BackendBucketsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import BackendBucketsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class BackendBucketsRestTransport(BackendBucketsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_signed_url_key( + def _add_signed_url_key( self, request: compute.AddSignedUrlKeyBackendBucketRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add signed url key method over HTTP. @@ -112,6 +139,9 @@ def add_signed_url_key( BackendBuckets.AddSignedUrlKey. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
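The private REST methods now accept retry and timeout, mirroring the parameters already exposed on the public client surface. A sketch of setting them from calling code (the retry settings and resource names are illustrative, and whether the REST layer applies the retry policy is not shown in this diff):

from google.api_core import retry as retries
from google.cloud import compute_v1

client = compute_v1.BackendBucketsClient()
buckets = client.list(
    request={"project": "my-project"},
    retry=retries.Retry(initial=1.0, maximum=10.0, multiplier=2.0, deadline=60.0),
    timeout=30.0,
)
for bucket in buckets:
    print(bucket.name)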
@@ -135,32 +165,62 @@ def add_signed_url_key( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/addSignedUrlKey", + "body": "signed_url_key_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_bucket", "backendBucket"), + ("project", "project"), + ] + + request_kwargs = compute.AddSignedUrlKeyBackendBucketRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SignedUrlKey.to_json( - request.signed_url_key_resource, + compute.SignedUrlKey(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/addSignedUrlKey".format( - host=self._host, - project=request.project, - backend_bucket=request.backend_bucket, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddSignedUrlKeyBackendBucketRequest.to_json( + compute.AddSignedUrlKeyBackendBucketRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddSignedUrlKeyBackendBucketRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -171,10 +231,12 @@ def add_signed_url_key( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteBackendBucketRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -185,6 +247,9 @@ def delete( BackendBuckets.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
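For context, the transcoding flow introduced above replaces the hand-built URLs: path_template.transcode matches the request dict against http_options, expands the URI template, splits out the body field, and leaves the remaining fields as query parameters, which rest_helpers.flatten_query_params then prepares for the HTTP session. A rough sketch with illustrative values:

    from google.api_core import path_template, rest_helpers

    http_options = [
        {
            "method": "post",
            "uri": "/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/addSignedUrlKey",
            "body": "signed_url_key_resource",
        },
    ]
    # Hypothetical request contents, as produced by Message.to_dict().
    request_kwargs = {
        "project": "my-project",
        "backend_bucket": "my-backend-bucket",
        "request_id": "1234abcd",
        "signed_url_key_resource": {"key_name": "example-key"},
    }
    transcoded = path_template.transcode(http_options, **request_kwargs)
    # transcoded["method"] == "post"
    # transcoded["uri"] expands the template with the project and bucket values
    # transcoded["body"] == {"key_name": "example-key"}
    # transcoded["query_params"] == {"request_id": "1234abcd"}
    params = rest_helpers.flatten_query_params(transcoded["query_params"])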
@@ -208,24 +273,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}".format( - host=self._host, - project=request.project, - backend_bucket=request.backend_bucket, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_bucket", "backendBucket"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteBackendBucketRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteBackendBucketRequest.to_json( + compute.DeleteBackendBucketRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteBackendBucketRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -235,10 +329,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_signed_url_key( + def _delete_signed_url_key( self, request: compute.DeleteSignedUrlKeyBackendBucketRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete signed url key method over HTTP. @@ -249,6 +345,9 @@ def delete_signed_url_key( BackendBuckets.DeleteSignedUrlKey. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -272,25 +371,56 @@ def delete_signed_url_key( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/deleteSignedUrlKey".format( - host=self._host, - project=request.project, - backend_bucket=request.backend_bucket, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/deleteSignedUrlKey", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_bucket", "backendBucket"), + ("key_name", "keyName"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteSignedUrlKeyBackendBucketRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteSignedUrlKeyBackendBucketRequest.to_json( + compute.DeleteSignedUrlKeyBackendBucketRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["keyName"] = request.key_name - if compute.DeleteSignedUrlKeyBackendBucketRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -300,10 +430,12 @@ def delete_signed_url_key( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetBackendBucketRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendBucket: r"""Call the get method over HTTP. @@ -314,6 +446,9 @@ def get( BackendBuckets.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -327,22 +462,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}".format( - host=self._host, - project=request.project, - backend_bucket=request.backend_bucket, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_bucket", "backendBucket"), + ("project", "project"), + ] + + request_kwargs = compute.GetBackendBucketRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetBackendBucketRequest.to_json( + compute.GetBackendBucketRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -354,10 +520,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertBackendBucketRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -368,6 +536,9 @@ def insert( BackendBuckets.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -391,30 +562,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendBuckets", + "body": "backend_bucket_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertBackendBucketRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendBucket.to_json( - request.backend_bucket_resource, + compute.BackendBucket(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertBackendBucketRequest.to_json( + compute.InsertBackendBucketRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertBackendBucketRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -425,10 +625,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListBackendBucketsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendBucketList: r"""Call the list method over HTTP. @@ -439,6 +641,9 @@ def list( BackendBuckets.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -449,30 +654,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/backendBuckets", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListBackendBucketsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListBackendBucketsRequest.to_json( + compute.ListBackendBucketsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListBackendBucketsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListBackendBucketsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListBackendBucketsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListBackendBucketsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListBackendBucketsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -484,10 +711,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchBackendBucketRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -498,6 +727,9 @@ def patch( BackendBuckets.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
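As the comment above notes, proto-plus to_json() with including_default_value_fields=False omits fields that are still at their default value, which is why the required_fields loop copies them back in from the transcoded request. A small sketch of that behavior, using an empty request purely for illustration:

    from google.cloud.compute_v1.types import compute

    request = compute.ListBackendBucketsRequest()  # "project" left at its default
    as_json = compute.ListBackendBucketsRequest.to_json(
        request, including_default_value_fields=False,
    )
    # The default-valued field is dropped from the JSON output, so the transport
    # restores required fields from transcoded_request["query_params"].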
@@ -521,32 +753,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}", + "body": "backend_bucket_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_bucket", "backendBucket"), + ("project", "project"), + ] + + request_kwargs = compute.PatchBackendBucketRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendBucket.to_json( - request.backend_bucket_resource, + compute.BackendBucket(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}".format( - host=self._host, - project=request.project, - backend_bucket=request.backend_bucket, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchBackendBucketRequest.to_json( + compute.PatchBackendBucketRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchBackendBucketRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -557,10 +817,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateBackendBucketRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -571,6 +833,9 @@ def update( BackendBuckets.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -594,32 +859,60 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}", + "body": "backend_bucket_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_bucket", "backendBucket"), + ("project", "project"), + ] + + request_kwargs = compute.UpdateBackendBucketRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendBucket.to_json( - request.backend_bucket_resource, + compute.BackendBucket(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}".format( - host=self._host, - project=request.project, - backend_bucket=request.backend_bucket, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateBackendBucketRequest.to_json( + compute.UpdateBackendBucketRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateBackendBucketRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -630,5 +923,52 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def add_signed_url_key( + self, + ) -> Callable[[compute.AddSignedUrlKeyBackendBucketRequest], compute.Operation]: + return self._add_signed_url_key + + @property + def delete( + self, + ) -> Callable[[compute.DeleteBackendBucketRequest], compute.Operation]: + return self._delete + + @property + def delete_signed_url_key( + self, + ) -> Callable[[compute.DeleteSignedUrlKeyBackendBucketRequest], compute.Operation]: + return self._delete_signed_url_key + + @property + def get(self) -> Callable[[compute.GetBackendBucketRequest], compute.BackendBucket]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertBackendBucketRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListBackendBucketsRequest], compute.BackendBucketList]: + 
return self._list + + @property + def patch(self) -> Callable[[compute.PatchBackendBucketRequest], compute.Operation]: + return self._patch + + @property + def update( + self, + ) -> Callable[[compute.UpdateBackendBucketRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("BackendBucketsRestTransport",) diff --git a/google/cloud/compute_v1/services/backend_services/client.py b/google/cloud/compute_v1/services/backend_services/client.py index dfa8b4528..aa61932c0 100644 --- a/google/cloud/compute_v1/services/backend_services/client.py +++ b/google/cloud/compute_v1/services/backend_services/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.backend_services import pagers from google.cloud.compute_v1.types import compute from .transports.base import BackendServicesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,16 +337,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_signed_url_key( self, - request: compute.AddSignedUrlKeyBackendServiceRequest = None, + request: Union[compute.AddSignedUrlKeyBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, signed_url_key_resource: compute.SignedUrlKey = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -343,7 +355,7 @@ def add_signed_url_key( for this backend service. 
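For reference, the stricter handling of GOOGLE_API_USE_CLIENT_CERTIFICATE shown above rejects any value other than the literal strings "true" and "false" before credentials are resolved; an illustrative sketch:

    import os
    from google.cloud import compute_v1

    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "yes"  # neither "true" nor "false"
    try:
        compute_v1.BackendServicesClient()
    except ValueError as exc:
        # "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be
        # either `true` or `false`"
        print(exc)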
Args: - request (google.cloud.compute_v1.types.AddSignedUrlKeyBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.AddSignedUrlKeyBackendServiceRequest, dict]): The request object. A request message for BackendServices.AddSignedUrlKey. See the method description for details. @@ -428,10 +440,10 @@ def add_signed_url_key( def aggregated_list( self, - request: compute.AggregatedListBackendServicesRequest = None, + request: Union[compute.AggregatedListBackendServicesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -439,7 +451,7 @@ def aggregated_list( regional and global, available to the specified project. Args: - request (google.cloud.compute_v1.types.AggregatedListBackendServicesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListBackendServicesRequest, dict]): The request object. A request message for BackendServices.AggregatedList. See the method description for details. @@ -504,18 +516,18 @@ def aggregated_list( def delete( self, - request: compute.DeleteBackendServiceRequest = None, + request: Union[compute.DeleteBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified BackendService resource. Args: - request (google.cloud.compute_v1.types.DeleteBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.DeleteBackendServiceRequest, dict]): The request object. A request message for BackendServices.Delete. See the method description for details. @@ -591,12 +603,12 @@ def delete( def delete_signed_url_key( self, - request: compute.DeleteSignedUrlKeyBackendServiceRequest = None, + request: Union[compute.DeleteSignedUrlKeyBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, key_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -604,7 +616,7 @@ def delete_signed_url_key( URLs for this backend service. Args: - request (google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.DeleteSignedUrlKeyBackendServiceRequest, dict]): The request object. A request message for BackendServices.DeleteSignedUrlKey. See the method description for details. @@ -691,11 +703,11 @@ def delete_signed_url_key( def get( self, - request: compute.GetBackendServiceRequest = None, + request: Union[compute.GetBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendService: @@ -703,7 +715,7 @@ def get( list of available backend services. Args: - request (google.cloud.compute_v1.types.GetBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.GetBackendServiceRequest, dict]): The request object. A request message for BackendServices.Get. See the method description for details. 
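Because these request parameters are now typed Union[<RequestType>, dict], callers may pass a plain dict, which the client coerces into the message type; an example with hypothetical resource names:

    from google.cloud import compute_v1

    client = compute_v1.BackendServicesClient()  # assumes default credentials
    # These two calls are equivalent; the dict form is coerced to the request type.
    client.delete(
        request={"project": "my-project", "backend_service": "my-backend-service"}
    )
    client.delete(
        request=compute_v1.DeleteBackendServiceRequest(
            project="my-project", backend_service="my-backend-service"
        )
    )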
@@ -779,12 +791,12 @@ def get( def get_health( self, - request: compute.GetHealthBackendServiceRequest = None, + request: Union[compute.GetHealthBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, resource_group_reference_resource: compute.ResourceGroupReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendServiceGroupHealth: @@ -793,7 +805,7 @@ def get_health( "/zones/us-east1-b/instanceGroups/lb-backend-example" } Args: - request (google.cloud.compute_v1.types.GetHealthBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.GetHealthBackendServiceRequest, dict]): The request object. A request message for BackendServices.GetHealth. See the method description for details. @@ -865,11 +877,11 @@ def get_health( def insert( self, - request: compute.InsertBackendServiceRequest = None, + request: Union[compute.InsertBackendServiceRequest, dict] = None, *, project: str = None, backend_service_resource: compute.BackendService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -878,7 +890,7 @@ def insert( information, see Backend services overview . Args: - request (google.cloud.compute_v1.types.InsertBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.InsertBackendServiceRequest, dict]): The request object. A request message for BackendServices.Insert. See the method description for details. @@ -952,10 +964,10 @@ def insert( def list( self, - request: compute.ListBackendServicesRequest = None, + request: Union[compute.ListBackendServicesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -963,7 +975,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListBackendServicesRequest): + request (Union[google.cloud.compute_v1.types.ListBackendServicesRequest, dict]): The request object. A request message for BackendServices.List. See the method description for details. @@ -1026,12 +1038,12 @@ def list( def patch( self, - request: compute.PatchBackendServiceRequest = None, + request: Union[compute.PatchBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, backend_service_resource: compute.BackendService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1042,7 +1054,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.PatchBackendServiceRequest, dict]): The request object. A request message for BackendServices.Patch. See the method description for details. 
@@ -1125,12 +1137,12 @@ def patch( def set_security_policy( self, - request: compute.SetSecurityPolicyBackendServiceRequest = None, + request: Union[compute.SetSecurityPolicyBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, security_policy_reference_resource: compute.SecurityPolicyReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1139,7 +1151,7 @@ def set_security_policy( Google Cloud Armor Overview Args: - request (google.cloud.compute_v1.types.SetSecurityPolicyBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.SetSecurityPolicyBackendServiceRequest, dict]): The request object. A request message for BackendServices.SetSecurityPolicy. See the method description for details. @@ -1227,12 +1239,12 @@ def set_security_policy( def update( self, - request: compute.UpdateBackendServiceRequest = None, + request: Union[compute.UpdateBackendServiceRequest, dict] = None, *, project: str = None, backend_service: str = None, backend_service_resource: compute.BackendService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1241,7 +1253,7 @@ def update( see Backend services overview. Args: - request (google.cloud.compute_v1.types.UpdateBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.UpdateBackendServiceRequest, dict]): The request object. A request message for BackendServices.Update. See the method description for details. @@ -1322,6 +1334,19 @@ def update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/backend_services/pagers.py b/google/cloud/compute_v1/services/backend_services/pagers.py index 57c42ca86..def4edd8d 100644 --- a/google/cloud/compute_v1/services/backend_services/pagers.py +++ b/google/cloud/compute_v1/services/backend_services/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.BackendServiceAggregatedList]: + def pages(self) -> Iterator[compute.BackendServiceAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.BackendServicesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.BackendServicesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.BackendServiceList]: + def pages(self) -> Iterator[compute.BackendServiceList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.BackendService]: + def __iter__(self) -> Iterator[compute.BackendService]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/backend_services/transports/base.py b/google/cloud/compute_v1/services/backend_services/transports/base.py index ad5073c95..06724b81d 100644 --- a/google/cloud/compute_v1/services/backend_services/transports/base.py +++ b/google/cloud/compute_v1/services/backend_services/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except 
pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class BackendServicesTransport(abc.ABC): """Abstract transport class for BackendServices.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -195,6 +159,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_signed_url_key( self, diff --git a/google/cloud/compute_v1/services/backend_services/transports/rest.py b/google/cloud/compute_v1/services/backend_services/transports/rest.py index 816726809..52a7ad6ed 100644 --- a/google/cloud/compute_v1/services/backend_services/transports/rest.py +++ b/google/cloud/compute_v1/services/backend_services/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + BackendServicesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import BackendServicesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class BackendServicesRestTransport(BackendServicesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_signed_url_key( + def _add_signed_url_key( self, request: compute.AddSignedUrlKeyBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add signed url key method over HTTP. @@ -112,6 +139,9 @@ def add_signed_url_key( BackendServices.AddSignedUrlKey. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,32 +165,62 @@ def add_signed_url_key( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}/addSignedUrlKey", + "body": "signed_url_key_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ] + + request_kwargs = compute.AddSignedUrlKeyBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SignedUrlKey.to_json( - request.signed_url_key_resource, + compute.SignedUrlKey(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}/addSignedUrlKey".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddSignedUrlKeyBackendServiceRequest.to_json( + compute.AddSignedUrlKeyBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddSignedUrlKeyBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -171,10 +231,12 @@ def add_signed_url_key( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListBackendServicesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendServiceAggregatedList: r"""Call the aggregated list method over HTTP. @@ -185,6 +247,9 @@ def aggregated_list( BackendServices.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -195,35 +260,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/backendServices".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListBackendServicesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListBackendServicesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListBackendServicesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListBackendServicesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListBackendServicesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListBackendServicesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/backendServices", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListBackendServicesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListBackendServicesRequest.to_json( + compute.AggregatedListBackendServicesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -235,10 +319,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -249,6 +335,9 @@ def delete( BackendServices.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -272,24 +361,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteBackendServiceRequest.to_json( + compute.DeleteBackendServiceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -299,10 +417,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_signed_url_key( + def _delete_signed_url_key( self, request: compute.DeleteSignedUrlKeyBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete signed url key method over HTTP. @@ -313,6 +433,9 @@ def delete_signed_url_key( BackendServices.DeleteSignedUrlKey. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -336,25 +459,58 @@ def delete_signed_url_key( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}/deleteSignedUrlKey".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}/deleteSignedUrlKey", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("key_name", "keyName"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteSignedUrlKeyBackendServiceRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteSignedUrlKeyBackendServiceRequest.to_json( + compute.DeleteSignedUrlKeyBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["keyName"] = request.key_name - if compute.DeleteSignedUrlKeyBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -364,10 +520,12 @@ def delete_signed_url_key( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendService: r"""Call the get method over HTTP. @@ -378,6 +536,9 @@ def get( BackendServices.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -401,22 +562,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ] + + request_kwargs = compute.GetBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetBackendServiceRequest.to_json( + compute.GetBackendServiceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -428,10 +620,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_health( + def _get_health( self, request: compute.GetHealthBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendServiceGroupHealth: r"""Call the get health method over HTTP. @@ -442,6 +636,9 @@ def get_health( BackendServices.GetHealth. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -450,30 +647,62 @@ def get_health( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}/getHealth", + "body": "resource_group_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ] + + request_kwargs = compute.GetHealthBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ResourceGroupReference.to_json( - request.resource_group_reference_resource, + compute.ResourceGroupReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}/getHealth".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetHealthBackendServiceRequest.to_json( + compute.GetHealthBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -486,10 +715,12 @@ def get_health( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -500,6 +731,9 @@ def insert( BackendServices.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -523,30 +757,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendServices", + "body": "backend_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendService.to_json( - request.backend_service_resource, + compute.BackendService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertBackendServiceRequest.to_json( + compute.InsertBackendServiceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -557,10 +820,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListBackendServicesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendServiceList: r"""Call the list method over HTTP. @@ -571,6 +836,9 @@ def list( BackendServices.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -581,30 +849,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListBackendServicesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListBackendServicesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListBackendServicesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListBackendServicesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListBackendServicesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/backendServices", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListBackendServicesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListBackendServicesRequest.to_json( + compute.ListBackendServicesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -616,10 +906,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -630,6 +922,9 @@ def patch( BackendServices.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -653,32 +948,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}", + "body": "backend_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ] + + request_kwargs = compute.PatchBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendService.to_json( - request.backend_service_resource, + compute.BackendService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchBackendServiceRequest.to_json( + compute.PatchBackendServiceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -689,10 +1012,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_security_policy( + def _set_security_policy( self, request: compute.SetSecurityPolicyBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set security policy method over HTTP. @@ -703,6 +1028,9 @@ def set_security_policy( BackendServices.SetSecurityPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -726,32 +1054,62 @@ def set_security_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy", + "body": "security_policy_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ] + + request_kwargs = compute.SetSecurityPolicyBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SecurityPolicyReference.to_json( - request.security_policy_reference_resource, + compute.SecurityPolicyReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetSecurityPolicyBackendServiceRequest.to_json( + compute.SetSecurityPolicyBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetSecurityPolicyBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -762,10 +1120,12 @@ def set_security_policy( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -776,6 +1136,9 @@ def update( BackendServices.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -799,32 +1162,60 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/global/backendServices/{backend_service}", + "body": "backend_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ] + + request_kwargs = compute.UpdateBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendService.to_json( - request.backend_service_resource, + compute.BackendService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateBackendServiceRequest.to_json( + compute.UpdateBackendServiceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -835,5 +1226,79 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def add_signed_url_key( + self, + ) -> Callable[[compute.AddSignedUrlKeyBackendServiceRequest], compute.Operation]: + return self._add_signed_url_key + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListBackendServicesRequest], + compute.BackendServiceAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteBackendServiceRequest], compute.Operation]: + return self._delete + + @property + def delete_signed_url_key( + self, + ) -> Callable[[compute.DeleteSignedUrlKeyBackendServiceRequest], compute.Operation]: + return self._delete_signed_url_key + + @property + def get( + self, + ) -> Callable[[compute.GetBackendServiceRequest], compute.BackendService]: + return self._get + + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthBackendServiceRequest], compute.BackendServiceGroupHealth + ]: + return self._get_health + + @property + def insert( + self, + ) -> Callable[[compute.InsertBackendServiceRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListBackendServicesRequest], compute.BackendServiceList]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchBackendServiceRequest], compute.Operation]: + return self._patch + + @property + def set_security_policy( + self, + ) -> Callable[[compute.SetSecurityPolicyBackendServiceRequest], compute.Operation]: + return self._set_security_policy + + @property + def update( + self, + ) -> Callable[[compute.UpdateBackendServiceRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("BackendServicesRestTransport",) diff --git a/google/cloud/compute_v1/services/disk_types/client.py b/google/cloud/compute_v1/services/disk_types/client.py index 5754aa468..758f38479 100644 --- a/google/cloud/compute_v1/services/disk_types/client.py +++ b/google/cloud/compute_v1/services/disk_types/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.disk_types import pagers from google.cloud.compute_v1.types import compute from .transports.base import DiskTypesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListDiskTypesRequest = None, + request: Union[compute.AggregatedListDiskTypesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of disk types. Args: - request (google.cloud.compute_v1.types.AggregatedListDiskTypesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListDiskTypesRequest, dict]): The request object. A request message for DiskTypes.AggregatedList. See the method description for details. @@ -399,12 +411,12 @@ def aggregated_list( def get( self, - request: compute.GetDiskTypeRequest = None, + request: Union[compute.GetDiskTypeRequest, dict] = None, *, project: str = None, zone: str = None, disk_type: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskType: @@ -412,7 +424,7 @@ def get( available disk types by making a list() request. Args: - request (google.cloud.compute_v1.types.GetDiskTypeRequest): + request (Union[google.cloud.compute_v1.types.GetDiskTypeRequest, dict]): The request object. 
A request message for DiskTypes.Get. See the method description for details. project (str): @@ -490,11 +502,11 @@ def get( def list( self, - request: compute.ListDiskTypesRequest = None, + request: Union[compute.ListDiskTypesRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -502,7 +514,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListDiskTypesRequest): + request (Union[google.cloud.compute_v1.types.ListDiskTypesRequest, dict]): The request object. A request message for DiskTypes.List. See the method description for details. project (str): @@ -570,6 +582,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/disk_types/pagers.py b/google/cloud/compute_v1/services/disk_types/pagers.py index f501b2b3b..917bc3513 100644 --- a/google/cloud/compute_v1/services/disk_types/pagers.py +++ b/google/cloud/compute_v1/services/disk_types/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.DiskTypeAggregatedList]: + def pages(self) -> Iterator[compute.DiskTypeAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.DiskTypesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.DiskTypesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.DiskTypeList]: + def pages(self) -> Iterator[compute.DiskTypeList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.DiskType]: + def __iter__(self) -> Iterator[compute.DiskType]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/disk_types/transports/base.py b/google/cloud/compute_v1/services/disk_types/transports/base.py index 435a8f2a6..6664aef20 100644 --- a/google/cloud/compute_v1/services/disk_types/transports/base.py +++ b/google/cloud/compute_v1/services/disk_types/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import 
google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class DiskTypesTransport(abc.ABC): """Abstract transport class for DiskTypes.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -170,6 +134,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/disk_types/transports/rest.py b/google/cloud/compute_v1/services/disk_types/transports/rest.py index 80b1c3be7..37aaf5552 100644 --- a/google/cloud/compute_v1/services/disk_types/transports/rest.py +++ b/google/cloud/compute_v1/services/disk_types/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import DiskTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import DiskTypesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class DiskTypesRestTransport(DiskTypesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListDiskTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskTypeAggregatedList: r"""Call the aggregated list method over HTTP. 
@@ -112,6 +136,9 @@ def aggregated_list( DiskTypes.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/diskTypes".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/diskTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListDiskTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListDiskTypesRequest.to_json( + compute.AggregatedListDiskTypesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListDiskTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListDiskTypesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListDiskTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListDiskTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListDiskTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListDiskTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetDiskTypeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskType: r"""Call the get method over HTTP. 
@@ -170,6 +221,9 @@ def get( The request object. A request message for DiskTypes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -189,23 +243,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/diskTypes/{disk_type}".format( - host=self._host, - project=request.project, - zone=request.zone, - disk_type=request.disk_type, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/diskTypes/{disk_type}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk_type", "diskType"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetDiskTypeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetDiskTypeRequest.to_json( + compute.GetDiskTypeRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -215,10 +300,12 @@ def get( # Return the response return compute.DiskType.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListDiskTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskTypeList: r"""Call the list method over HTTP. @@ -228,6 +315,9 @@ def list( The request object. A request message for DiskTypes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -236,30 +326,53 @@ def list( Contains a list of disk types. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/diskTypes".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/diskTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListDiskTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListDiskTypesRequest.to_json( + compute.ListDiskTypesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListDiskTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListDiskTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListDiskTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListDiskTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListDiskTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -271,5 +384,24 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListDiskTypesRequest], compute.DiskTypeAggregatedList + ]: + return self._aggregated_list + + @property + def get(self) -> Callable[[compute.GetDiskTypeRequest], compute.DiskType]: + return self._get + + @property + def list(self) -> Callable[[compute.ListDiskTypesRequest], compute.DiskTypeList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("DiskTypesRestTransport",) diff --git a/google/cloud/compute_v1/services/disks/client.py b/google/cloud/compute_v1/services/disks/client.py index b7def10ab..baaa614dc 100644 --- a/google/cloud/compute_v1/services/disks/client.py +++ b/google/cloud/compute_v1/services/disks/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.disks import pagers from google.cloud.compute_v1.types import compute from .transports.base import DisksTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,17 +335,18 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_resource_policies( self, - request: compute.AddResourcePoliciesDiskRequest = None, + request: Union[compute.AddResourcePoliciesDiskRequest, dict] = None, *, project: str = None, zone: str = None, disk: str = None, disks_add_resource_policies_request_resource: compute.DisksAddResourcePoliciesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -343,7 +355,7 @@ def add_resource_policies( for scheduling snapshot creation. Args: - request (google.cloud.compute_v1.types.AddResourcePoliciesDiskRequest): + request (Union[google.cloud.compute_v1.types.AddResourcePoliciesDiskRequest, dict]): The request object. A request message for Disks.AddResourcePolicies. See the method description for details. @@ -437,17 +449,17 @@ def add_resource_policies( def aggregated_list( self, - request: compute.AggregatedListDisksRequest = None, + request: Union[compute.AggregatedListDisksRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of persistent disks. 
Args: - request (google.cloud.compute_v1.types.AggregatedListDisksRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListDisksRequest, dict]): The request object. A request message for Disks.AggregatedList. See the method description for details. @@ -508,20 +520,20 @@ def aggregated_list( def create_snapshot( self, - request: compute.CreateSnapshotDiskRequest = None, + request: Union[compute.CreateSnapshotDiskRequest, dict] = None, *, project: str = None, zone: str = None, disk: str = None, snapshot_resource: compute.Snapshot = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Creates a snapshot of a specified persistent disk. Args: - request (google.cloud.compute_v1.types.CreateSnapshotDiskRequest): + request (Union[google.cloud.compute_v1.types.CreateSnapshotDiskRequest, dict]): The request object. A request message for Disks.CreateSnapshot. See the method description for details. @@ -613,12 +625,12 @@ def create_snapshot( def delete( self, - request: compute.DeleteDiskRequest = None, + request: Union[compute.DeleteDiskRequest, dict] = None, *, project: str = None, zone: str = None, disk: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -629,7 +641,7 @@ def delete( delete snapshots. Args: - request (google.cloud.compute_v1.types.DeleteDiskRequest): + request (Union[google.cloud.compute_v1.types.DeleteDiskRequest, dict]): The request object. A request message for Disks.Delete. See the method description for details. project (str): @@ -713,12 +725,12 @@ def delete( def get( self, - request: compute.GetDiskRequest = None, + request: Union[compute.GetDiskRequest, dict] = None, *, project: str = None, zone: str = None, disk: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Disk: @@ -726,7 +738,7 @@ def get( available persistent disks by making a list() request. Args: - request (google.cloud.compute_v1.types.GetDiskRequest): + request (Union[google.cloud.compute_v1.types.GetDiskRequest, dict]): The request object. A request message for Disks.Get. See the method description for details. project (str): @@ -807,12 +819,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyDiskRequest = None, + request: Union[compute.GetIamPolicyDiskRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -820,7 +832,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyDiskRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyDiskRequest, dict]): The request object. A request message for Disks.GetIamPolicy. See the method description for details. 
@@ -929,12 +941,12 @@ def get_iam_policy( def insert( self, - request: compute.InsertDiskRequest = None, + request: Union[compute.InsertDiskRequest, dict] = None, *, project: str = None, zone: str = None, disk_resource: compute.Disk = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -947,7 +959,7 @@ def insert( property. Args: - request (google.cloud.compute_v1.types.InsertDiskRequest): + request (Union[google.cloud.compute_v1.types.InsertDiskRequest, dict]): The request object. A request message for Disks.Insert. See the method description for details. project (str): @@ -1029,11 +1041,11 @@ def insert( def list( self, - request: compute.ListDisksRequest = None, + request: Union[compute.ListDisksRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -1041,7 +1053,7 @@ def list( the specified zone. Args: - request (google.cloud.compute_v1.types.ListDisksRequest): + request (Union[google.cloud.compute_v1.types.ListDisksRequest, dict]): The request object. A request message for Disks.List. See the method description for details. project (str): @@ -1111,20 +1123,20 @@ def list( def remove_resource_policies( self, - request: compute.RemoveResourcePoliciesDiskRequest = None, + request: Union[compute.RemoveResourcePoliciesDiskRequest, dict] = None, *, project: str = None, zone: str = None, disk: str = None, disks_remove_resource_policies_request_resource: compute.DisksRemoveResourcePoliciesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Removes resource policies from a disk. Args: - request (google.cloud.compute_v1.types.RemoveResourcePoliciesDiskRequest): + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesDiskRequest, dict]): The request object. A request message for Disks.RemoveResourcePolicies. See the method description for details. @@ -1218,13 +1230,13 @@ def remove_resource_policies( def resize( self, - request: compute.ResizeDiskRequest = None, + request: Union[compute.ResizeDiskRequest, dict] = None, *, project: str = None, zone: str = None, disk: str = None, disks_resize_request_resource: compute.DisksResizeRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1232,7 +1244,7 @@ def resize( increase the size of the disk. Args: - request (google.cloud.compute_v1.types.ResizeDiskRequest): + request (Union[google.cloud.compute_v1.types.ResizeDiskRequest, dict]): The request object. A request message for Disks.Resize. See the method description for details. 
project (str): @@ -1321,13 +1333,13 @@ def resize( def set_iam_policy( self, - request: compute.SetIamPolicyDiskRequest = None, + request: Union[compute.SetIamPolicyDiskRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, zone_set_policy_request_resource: compute.ZoneSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1335,7 +1347,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyDiskRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyDiskRequest, dict]): The request object. A request message for Disks.SetIamPolicy. See the method description for details. @@ -1455,13 +1467,13 @@ def set_iam_policy( def set_labels( self, - request: compute.SetLabelsDiskRequest = None, + request: Union[compute.SetLabelsDiskRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, zone_set_labels_request_resource: compute.ZoneSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1469,7 +1481,7 @@ def set_labels( labels, read the Labeling Resources documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsDiskRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsDiskRequest, dict]): The request object. A request message for Disks.SetLabels. See the method description for details. project (str): @@ -1564,13 +1576,13 @@ def set_labels( def test_iam_permissions( self, - request: compute.TestIamPermissionsDiskRequest = None, + request: Union[compute.TestIamPermissionsDiskRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1578,7 +1590,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsDiskRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsDiskRequest, dict]): The request object. A request message for Disks.TestIamPermissions. See the method description for details. @@ -1657,6 +1669,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/disks/pagers.py b/google/cloud/compute_v1/services/disks/pagers.py index c5c4a11b6..b2116db90 100644 --- a/google/cloud/compute_v1/services/disks/pagers.py +++ b/google/cloud/compute_v1/services/disks/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.DiskAggregatedList]: + def pages(self) -> Iterator[compute.DiskAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.DisksScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.DisksScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.DiskList]: + def pages(self) -> Iterator[compute.DiskList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Disk]: + def __iter__(self) -> Iterator[compute.Disk]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/disks/transports/base.py b/google/cloud/compute_v1/services/disks/transports/base.py index f089ac23e..f40fd6fc8 100644 --- a/google/cloud/compute_v1/services/disks/transports/base.py +++ b/google/cloud/compute_v1/services/disks/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class DisksTransport(abc.ABC): """Abstract transport class for Disks.""" @@ -99,7 +86,7 @@ def 
__init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -205,6 +169,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_resource_policies( self, diff --git a/google/cloud/compute_v1/services/disks/transports/rest.py b/google/cloud/compute_v1/services/disks/transports/rest.py index 93dac1f99..893765324 100644 --- a/google/cloud/compute_v1/services/disks/transports/rest.py +++ b/google/cloud/compute_v1/services/disks/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import DisksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import DisksTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class DisksRestTransport(DisksTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_resource_policies( + def _add_resource_policies( self, request: compute.AddResourcePoliciesDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add resource policies method over HTTP. @@ -112,6 +136,9 @@ def add_resource_policies( Disks.AddResourcePolicies. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
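Note: the `__enter__`/`__exit__` pair added to the client above, together with the new abstract `close()` on the base transport, lets the client be used as a context manager. A short usage sketch, assuming the transport is owned by this client alone (the warning in the added docstring applies):

```python
from google.cloud import compute_v1

# Exiting the block calls client.transport.close(), which releases the
# underlying HTTP session; do not do this if the transport is shared.
with compute_v1.DisksClient() as client:
    disk = client.get(project="my-project", zone="us-central1-a", disk="my-disk")
    print(disk.name)
```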
@@ -135,33 +162,63 @@ def add_resource_policies( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/addResourcePolicies", + "body": "disks_add_resource_policies_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AddResourcePoliciesDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.DisksAddResourcePoliciesRequest.to_json( - request.disks_add_resource_policies_request_resource, + compute.DisksAddResourcePoliciesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/addResourcePolicies".format( - host=self._host, - project=request.project, - zone=request.zone, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddResourcePoliciesDiskRequest.to_json( + compute.AddResourcePoliciesDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddResourcePoliciesDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -172,10 +229,12 @@ def add_resource_policies( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListDisksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskAggregatedList: r"""Call the aggregated list method over HTTP. @@ -186,6 +245,9 @@ def aggregated_list( Disks.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
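Note: in the rewritten `_add_resource_policies` above, hand-built URL formatting is replaced by `google.api_core.path_template.transcode`, which matches the request fields against `http_options` and splits them into URI, body, and query parameters. A rough sketch of what that call yields for this method (field values are invented; the return keys shown are the ones the code above reads):

```python
from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/addResourcePolicies",
        "body": "disks_add_resource_policies_request_resource",
    },
]

request_kwargs = {
    "project": "my-project",
    "zone": "us-central1-a",
    "disk": "my-disk",
    "request_id": "req-1234",  # illustrative only
    "disks_add_resource_policies_request_resource": {"resource_policies": ["example-policy"]},
}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       == "post"
# transcoded["uri"]          == "/compute/v1/projects/my-project/zones/us-central1-a/disks/my-disk/addResourcePolicies"
# transcoded["body"]         == {"resource_policies": ["example-policy"]}
# transcoded["query_params"] == {"request_id": "req-1234"}  # fields not bound to the path or body
```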
@@ -194,32 +256,52 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/disks".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/disks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListDisksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListDisksRequest.to_json( + compute.AggregatedListDisksRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListDisksRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListDisksRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListDisksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListDisksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListDisksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListDisksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -231,10 +313,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def create_snapshot( + def _create_snapshot( self, request: compute.CreateSnapshotDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the create snapshot method over HTTP. @@ -245,6 +329,9 @@ def create_snapshot( Disks.CreateSnapshot. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
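Note: the "Ensure required fields have values in query_params" block above guards against `to_json(..., including_default_value_fields=False)` silently dropping a required field whose value happens to be the proto default. Stripped of context, the backfill is only this (field names and values are invented for illustration):

```python
# (snake_case_name, camelCaseName) pairs, as in required_fields above.
required_fields = [("return_partial_success", "returnPartialSuccess")]

# What transcode produced vs. what survived the to_json round trip.
orig_query_params = {"return_partial_success": False, "page_token": "abc"}
query_params = {"pageToken": "abc"}  # False was dropped as a default value

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

print(query_params)  # {'pageToken': 'abc', 'returnPartialSuccess': False}
```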
@@ -268,35 +355,61 @@ def create_snapshot( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/createSnapshot", + "body": "snapshot_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.CreateSnapshotDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Snapshot.to_json( - request.snapshot_resource, + compute.Snapshot(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/createSnapshot".format( - host=self._host, - project=request.project, - zone=request.zone, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.CreateSnapshotDiskRequest.to_json( + compute.CreateSnapshotDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.CreateSnapshotDiskRequest.guest_flush in request: - query_params["guestFlush"] = request.guest_flush - if compute.CreateSnapshotDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -307,10 +420,12 @@ def create_snapshot( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -320,6 +435,9 @@ def delete( The request object. A request message for Disks.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
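Note: the request body above is built with the proto-plus `to_json` classmethod on the message type, and the response is parsed back with `from_json`, the same pair every method below uses. A small round trip (field values are made up):

```python
from google.cloud.compute_v1.types import compute

snapshot = compute.Snapshot(name="snap-1", description="nightly snapshot")
body = compute.Snapshot.to_json(
    snapshot,
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
# body is a JSON string, e.g. '{"name": "snap-1", "description": "nightly snapshot"}'

operation = compute.Operation.from_json(
    '{"name": "operation-123"}', ignore_unknown_fields=True
)
print(operation.name)
```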
@@ -343,25 +461,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{disk}".format( - host=self._host, - project=request.project, - zone=request.zone, - disk=request.disk, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteDiskRequest.to_json( + compute.DeleteDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -371,10 +518,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Disk: r"""Call the get method over HTTP. @@ -384,6 +533,9 @@ def get( The request object. A request message for Disks.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
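Note: rather than hard-coding `self._session.delete(...)`, the rewritten methods pick the HTTP verb reported by `transcode` at runtime via `getattr(self._session, method)`. The idiom in isolation (URL and timeout are placeholders; an unauthenticated call like this would simply return an error status):

```python
import requests

session = requests.Session()
method = "delete"  # would come from transcoded_request["method"]

# requests.Session exposes get/post/put/patch/delete, so this is
# equivalent to session.delete(url, ...).
response = getattr(session, method)(
    "https://compute.googleapis.com/compute/v1/projects/my-project"
    "/zones/us-central1-a/disks/my-disk",
    timeout=30,
    headers={"Content-Type": "application/json"},
)
print(response.status_code)
```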
@@ -404,23 +556,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{disk}".format( - host=self._host, - project=request.project, - zone=request.zone, - disk=request.disk, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetDiskRequest.to_json( + compute.GetDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -430,10 +613,12 @@ def get( # Return the response return compute.Disk.from_json(response.content, ignore_unknown_fields=True) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -444,6 +629,9 @@ def get_iam_policy( Disks.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
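Note: each method ends by turning a non-2xx response into the matching `GoogleAPICallError` subclass; the diff elides that check. One common way to express it with google-api-core is roughly the sketch below (not necessarily the exact code the generator emits):

```python
from google.api_core import exceptions as core_exceptions

def _raise_for_status(response):
    # from_http_response maps e.g. 404 -> NotFound, 403 -> PermissionDenied.
    if response.status_code >= 400:
        raise core_exceptions.from_http_response(response)
```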
@@ -492,27 +680,54 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetIamPolicyDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyDiskRequest.to_json( + compute.GetIamPolicyDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetIamPolicyDiskRequest.options_requested_policy_version in request: - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -522,10 +737,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -535,6 +752,9 @@ def insert( The request object. A request message for Disks.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -558,32 +778,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks", + "body": "disk_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Disk.to_json( - request.disk_resource, + compute.Disk(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertDiskRequest.to_json( + compute.InsertDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertDiskRequest.request_id in request: - query_params["requestId"] = request.request_id - if compute.InsertDiskRequest.source_image in request: - query_params["sourceImage"] = request.source_image + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -594,10 +842,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListDisksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskList: r"""Call the list method over HTTP. @@ -607,6 +857,9 @@ def list( The request object. A request message for Disks.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -615,30 +868,53 @@ def list( A list of Disk resources. 
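Note: at the public-client level (the disks client diff earlier in this changeset), `_insert` is reached through the flattened keyword parameters. A hedged end-to-end sketch, with placeholder project, zone, and disk settings:

```python
from google.cloud import compute_v1

client = compute_v1.DisksClient()

disk = compute_v1.Disk(
    name="example-disk",
    size_gb=100,
    type_="zones/us-central1-a/diskTypes/pd-standard",
)
operation = client.insert(
    project="my-project", zone="us-central1-a", disk_resource=disk
)
print(operation.name)
```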
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListDisksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListDisksRequest.to_json( + compute.ListDisksRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListDisksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListDisksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListDisksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListDisksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListDisksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -648,10 +924,12 @@ def list( # Return the response return compute.DiskList.from_json(response.content, ignore_unknown_fields=True) - def remove_resource_policies( + def _remove_resource_policies( self, request: compute.RemoveResourcePoliciesDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove resource policies method over HTTP. @@ -662,6 +940,9 @@ def remove_resource_policies( Disks.RemoveResourcePolicies. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -685,33 +966,63 @@ def remove_resource_policies( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/removeResourcePolicies", + "body": "disks_remove_resource_policies_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.RemoveResourcePoliciesDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.DisksRemoveResourcePoliciesRequest.to_json( - request.disks_remove_resource_policies_request_resource, + compute.DisksRemoveResourcePoliciesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/removeResourcePolicies".format( - host=self._host, - project=request.project, - zone=request.zone, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveResourcePoliciesDiskRequest.to_json( + compute.RemoveResourcePoliciesDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveResourcePoliciesDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -722,10 +1033,12 @@ def remove_resource_policies( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def resize( + def _resize( self, request: compute.ResizeDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the resize method over HTTP. @@ -735,6 +1048,9 @@ def resize( The request object. A request message for Disks.Resize. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -758,33 +1074,61 @@ def resize( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/resize", + "body": "disks_resize_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ResizeDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.DisksResizeRequest.to_json( - request.disks_resize_request_resource, + compute.DisksResizeRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/resize".format( - host=self._host, - project=request.project, - zone=request.zone, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ResizeDiskRequest.to_json( + compute.ResizeDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ResizeDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -795,10 +1139,12 @@ def resize( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -809,6 +1155,9 @@ def set_iam_policy( Disks.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -857,31 +1206,61 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setIamPolicy", + "body": "zone_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetIamPolicyDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ZoneSetPolicyRequest.to_json( - request.zone_set_policy_request_resource, + compute.ZoneSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyDiskRequest.to_json( + compute.SetIamPolicyDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -892,10 +1271,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def set_labels( + def _set_labels( self, request: compute.SetLabelsDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -906,6 +1287,9 @@ def set_labels( Disks.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -929,33 +1313,61 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setLabels", + "body": "zone_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetLabelsDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ZoneSetLabelsRequest.to_json( - request.zone_set_labels_request_resource, + compute.ZoneSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setLabels".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsDiskRequest.to_json( + compute.SetLabelsDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetLabelsDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -966,10 +1378,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -980,6 +1394,9 @@ def test_iam_permissions( Disks.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
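Note: as with the other mutations, `_set_labels` is reached from the public client through flattened parameters; the Compute API also expects the resource's current `label_fingerprint` to guard against concurrent label updates. A hedged usage sketch with placeholder names:

```python
from google.cloud import compute_v1

client = compute_v1.DisksClient()

disk = client.get(project="my-project", zone="us-central1-a", disk="example-disk")
labels_request = compute_v1.ZoneSetLabelsRequest(
    labels={"env": "prod", "team": "storage"},
    label_fingerprint=disk.label_fingerprint,
)
operation = client.set_labels(
    project="my-project",
    zone="us-central1-a",
    resource="example-disk",
    zone_set_labels_request_resource=labels_request,
)
print(operation.name)
```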
@@ -988,31 +1405,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.TestIamPermissionsDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsDiskRequest.to_json( + compute.TestIamPermissionsDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1025,5 +1474,76 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def add_resource_policies( + self, + ) -> Callable[[compute.AddResourcePoliciesDiskRequest], compute.Operation]: + return self._add_resource_policies + + @property + def aggregated_list( + self, + ) -> Callable[[compute.AggregatedListDisksRequest], compute.DiskAggregatedList]: + return self._aggregated_list + + @property + def create_snapshot( + self, + ) -> Callable[[compute.CreateSnapshotDiskRequest], compute.Operation]: + return self._create_snapshot + + @property + def delete(self) -> Callable[[compute.DeleteDiskRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetDiskRequest], compute.Disk]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyDiskRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert(self) -> 
Callable[[compute.InsertDiskRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListDisksRequest], compute.DiskList]: + return self._list + + @property + def remove_resource_policies( + self, + ) -> Callable[[compute.RemoveResourcePoliciesDiskRequest], compute.Operation]: + return self._remove_resource_policies + + @property + def resize(self) -> Callable[[compute.ResizeDiskRequest], compute.Operation]: + return self._resize + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyDiskRequest], compute.Policy]: + return self._set_iam_policy + + @property + def set_labels(self) -> Callable[[compute.SetLabelsDiskRequest], compute.Operation]: + return self._set_labels + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsDiskRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("DisksRestTransport",) diff --git a/google/cloud/compute_v1/services/external_vpn_gateways/client.py b/google/cloud/compute_v1/services/external_vpn_gateways/client.py index a7f7d1ecc..4ff2ff4a0 100644 --- a/google/cloud/compute_v1/services/external_vpn_gateways/client.py +++ b/google/cloud/compute_v1/services/external_vpn_gateways/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.external_vpn_gateways import pagers from google.cloud.compute_v1.types import compute from .transports.base import ExternalVpnGatewaysTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,22 +339,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteExternalVpnGatewayRequest = None, + request: Union[compute.DeleteExternalVpnGatewayRequest, dict] = None, *, project: str = None, external_vpn_gateway: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified externalVpnGateway. Args: - request (google.cloud.compute_v1.types.DeleteExternalVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.DeleteExternalVpnGatewayRequest, dict]): The request object. A request message for ExternalVpnGateways.Delete. See the method description for details. @@ -419,11 +431,11 @@ def delete( def get( self, - request: compute.GetExternalVpnGatewayRequest = None, + request: Union[compute.GetExternalVpnGatewayRequest, dict] = None, *, project: str = None, external_vpn_gateway: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ExternalVpnGateway: @@ -432,7 +444,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetExternalVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.GetExternalVpnGatewayRequest, dict]): The request object. A request message for ExternalVpnGateways.Get. See the method description for details. @@ -507,11 +519,11 @@ def get( def insert( self, - request: compute.InsertExternalVpnGatewayRequest = None, + request: Union[compute.InsertExternalVpnGatewayRequest, dict] = None, *, project: str = None, external_vpn_gateway_resource: compute.ExternalVpnGateway = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -519,7 +531,7 @@ def insert( using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertExternalVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.InsertExternalVpnGatewayRequest, dict]): The request object. A request message for ExternalVpnGateways.Insert. See the method description for details. @@ -593,10 +605,10 @@ def insert( def list( self, - request: compute.ListExternalVpnGatewaysRequest = None, + request: Union[compute.ListExternalVpnGatewaysRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -604,7 +616,7 @@ def list( the specified project. Args: - request (google.cloud.compute_v1.types.ListExternalVpnGatewaysRequest): + request (Union[google.cloud.compute_v1.types.ListExternalVpnGatewaysRequest, dict]): The request object. 
A request message for ExternalVpnGateways.List. See the method description for details. @@ -667,12 +679,12 @@ def list( def set_labels( self, - request: compute.SetLabelsExternalVpnGatewayRequest = None, + request: Union[compute.SetLabelsExternalVpnGatewayRequest, dict] = None, *, project: str = None, resource: str = None, global_set_labels_request_resource: compute.GlobalSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -681,7 +693,7 @@ def set_labels( documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsExternalVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsExternalVpnGatewayRequest, dict]): The request object. A request message for ExternalVpnGateways.SetLabels. See the method description for details. @@ -768,12 +780,14 @@ def set_labels( def test_iam_permissions( self, - request: compute.TestIamPermissionsExternalVpnGatewayRequest = None, + request: Union[ + compute.TestIamPermissionsExternalVpnGatewayRequest, dict + ] = None, *, project: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -781,7 +795,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsExternalVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsExternalVpnGatewayRequest, dict]): The request object. A request message for ExternalVpnGateways.TestIamPermissions. See the method description for details. @@ -851,6 +865,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/external_vpn_gateways/pagers.py b/google/cloud/compute_v1/services/external_vpn_gateways/pagers.py index a3c76c923..4d0ba0e3a 100644 --- a/google/cloud/compute_v1/services/external_vpn_gateways/pagers.py +++ b/google/cloud/compute_v1/services/external_vpn_gateways/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ExternalVpnGatewayList]: + def pages(self) -> Iterator[compute.ExternalVpnGatewayList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ExternalVpnGateway]: + def __iter__(self) -> Iterator[compute.ExternalVpnGateway]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/external_vpn_gateways/transports/base.py b/google/cloud/compute_v1/services/external_vpn_gateways/transports/base.py index b7b5e8a61..272832c68 100644 --- a/google/cloud/compute_v1/services/external_vpn_gateways/transports/base.py +++ b/google/cloud/compute_v1/services/external_vpn_gateways/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ExternalVpnGatewaysTransport(abc.ABC): """Abstract transport class for ExternalVpnGateways.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. 
+ # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -180,6 +144,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py b/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py index d937a889a..5a0d886c6 100644 --- a/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py +++ b/google/cloud/compute_v1/services/external_vpn_gateways/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + ExternalVpnGatewaysTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import ExternalVpnGatewaysTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ExternalVpnGatewaysRestTransport(ExternalVpnGatewaysTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteExternalVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( ExternalVpnGateways.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
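# --- Illustrative usage sketch, not part of the diff: every method now accepts the
# retry and timeout arguments added above. The project and gateway names are made up,
# and application default credentials are assumed.
from google.api_core import retry as retries
from google.cloud import compute_v1

client = compute_v1.ExternalVpnGatewaysClient()
operation = client.delete(
    project="my-project",
    external_vpn_gateway="my-gateway",
    retry=retries.Retry(initial=1.0, maximum=10.0, deadline=60.0),
    timeout=30.0,
)
print(operation.status)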
@@ -135,24 +165,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}".format( - host=self._host, - project=request.project, - external_vpn_gateway=request.external_vpn_gateway, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("external_vpn_gateway", "externalVpnGateway"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteExternalVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteExternalVpnGatewayRequest.to_json( + compute.DeleteExternalVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteExternalVpnGatewayRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -162,10 +223,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetExternalVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ExternalVpnGateway: r"""Call the get method over HTTP. @@ -176,6 +239,9 @@ def get( ExternalVpnGateways.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
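# --- Illustrative sketch, not part of the diff: rest_helpers.flatten_query_params,
# used above, turns the query_params dict into the flat key/value pairs handed to
# requests. The values are made up; nested keys are joined with dots.
from google.api_core import rest_helpers

query_params = {"requestId": "abc-123", "labels": {"env": "dev", "team": "infra"}}
print(rest_helpers.flatten_query_params(query_params))
# e.g. [('requestId', 'abc-123'), ('labels.env', 'dev'), ('labels.team', 'infra')]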
@@ -198,22 +264,55 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}".format( - host=self._host, - project=request.project, - external_vpn_gateway=request.external_vpn_gateway, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("external_vpn_gateway", "externalVpnGateway"), + ("project", "project"), + ] + + request_kwargs = compute.GetExternalVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetExternalVpnGatewayRequest.to_json( + compute.GetExternalVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +324,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertExternalVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -239,6 +340,9 @@ def insert( ExternalVpnGateways.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -262,30 +366,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/externalVpnGateways", + "body": "external_vpn_gateway_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertExternalVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ExternalVpnGateway.to_json( - request.external_vpn_gateway_resource, + compute.ExternalVpnGateway(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/externalVpnGateways".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertExternalVpnGatewayRequest.to_json( + compute.InsertExternalVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertExternalVpnGatewayRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -296,10 +431,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListExternalVpnGatewaysRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ExternalVpnGatewayList: r"""Call the list method over HTTP. @@ -310,6 +447,9 @@ def list( ExternalVpnGateways.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
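# --- Illustrative usage sketch, not part of the diff: inserting a gateway through the
# public client, which routes to the _insert transport method above. Resource names are
# made up and application default credentials are assumed.
from google.cloud import compute_v1

client = compute_v1.ExternalVpnGatewaysClient()
operation = client.insert(
    project="my-project",
    external_vpn_gateway_resource=compute_v1.ExternalVpnGateway(
        name="my-gateway",
        description="Example gateway",
    ),
)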
@@ -320,30 +460,54 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/externalVpnGateways".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/externalVpnGateways", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListExternalVpnGatewaysRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListExternalVpnGatewaysRequest.to_json( + compute.ListExternalVpnGatewaysRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListExternalVpnGatewaysRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListExternalVpnGatewaysRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListExternalVpnGatewaysRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListExternalVpnGatewaysRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListExternalVpnGatewaysRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -355,10 +519,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_labels( + def _set_labels( self, request: compute.SetLabelsExternalVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -369,6 +535,9 @@ def set_labels( ExternalVpnGateways.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
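# --- Illustrative usage sketch, not part of the diff: list() returns a pager that
# fetches further pages lazily while iterating (see the pagers.py changes above).
# The project name is made up and application default credentials are assumed.
from google.cloud import compute_v1

client = compute_v1.ExternalVpnGatewaysClient()
for gateway in client.list(project="my-project"):
    print(gateway.name)

# Page-at-a-time iteration is also available:
for page in client.list(project="my-project").pages:
    print(len(page.items))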
@@ -392,28 +561,62 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/setLabels", + "body": "global_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetLabelsExternalVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetLabelsRequest.to_json( - request.global_set_labels_request_resource, + compute.GlobalSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/setLabels".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsExternalVpnGatewayRequest.to_json( + compute.SetLabelsExternalVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -424,10 +627,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsExternalVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -438,6 +643,9 @@ def test_iam_permissions( ExternalVpnGateways.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
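# --- Illustrative usage sketch, not part of the diff: SetLabels is a read-modify-write
# call, so the current label fingerprint is sent back along with the new label set.
# Resource names are made up and application default credentials are assumed.
from google.cloud import compute_v1

client = compute_v1.ExternalVpnGatewaysClient()
gateway = client.get(project="my-project", external_vpn_gateway="my-gateway")
operation = client.set_labels(
    project="my-project",
    resource="my-gateway",
    global_set_labels_request_resource=compute_v1.GlobalSetLabelsRequest(
        label_fingerprint=gateway.label_fingerprint,
        labels={"env": "dev"},  # replaces the full label set
    ),
)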
@@ -446,28 +654,64 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsExternalVpnGatewayRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/testIamPermissions".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsExternalVpnGatewayRequest.to_json( + compute.TestIamPermissionsExternalVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -480,5 +724,49 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[[compute.DeleteExternalVpnGatewayRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetExternalVpnGatewayRequest], compute.ExternalVpnGateway]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertExternalVpnGatewayRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListExternalVpnGatewaysRequest], compute.ExternalVpnGatewayList + ]: + return self._list + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsExternalVpnGatewayRequest], compute.Operation]: + return self._set_labels + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsExternalVpnGatewayRequest], + compute.TestPermissionsResponse, + 
]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("ExternalVpnGatewaysRestTransport",) diff --git a/google/cloud/compute_v1/services/firewall_policies/client.py b/google/cloud/compute_v1/services/firewall_policies/client.py index 84f8b2394..9de6ec5f7 100644 --- a/google/cloud/compute_v1/services/firewall_policies/client.py +++ b/google/cloud/compute_v1/services/firewall_policies/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.firewall_policies import pagers from google.cloud.compute_v1.types import compute from .transports.base import FirewallPoliciesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,15 +337,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_association( self, - request: compute.AddAssociationFirewallPolicyRequest = None, + request: Union[compute.AddAssociationFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, firewall_policy_association_resource: compute.FirewallPolicyAssociation = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -342,7 +354,7 @@ def add_association( policy. Args: - request (google.cloud.compute_v1.types.AddAssociationFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.AddAssociationFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.AddAssociation. See the method description for details. 
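# --- Illustrative sketch, not part of the diff: the strict check above replaces
# distutils.util.strtobool, so only the literal strings "true" and "false" are
# accepted. The helper name _use_client_cert is hypothetical.
import os

def _use_client_cert() -> bool:
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return value == "true"

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "True"
# _use_client_cert() would now raise ValueError; strtobool would have accepted "True".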
@@ -422,18 +434,18 @@ def add_association( def add_rule( self, - request: compute.AddRuleFirewallPolicyRequest = None, + request: Union[compute.AddRuleFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, firewall_policy_rule_resource: compute.FirewallPolicyRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Inserts a rule into a firewall policy. Args: - request (google.cloud.compute_v1.types.AddRuleFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.AddRuleFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.AddRule. See the method description for details. @@ -509,17 +521,17 @@ def add_rule( def clone_rules( self, - request: compute.CloneRulesFirewallPolicyRequest = None, + request: Union[compute.CloneRulesFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Copies rules to the specified firewall policy. Args: - request (google.cloud.compute_v1.types.CloneRulesFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.CloneRulesFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.CloneRules. See the method description for details. @@ -588,17 +600,17 @@ def clone_rules( def delete( self, - request: compute.DeleteFirewallPolicyRequest = None, + request: Union[compute.DeleteFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified policy. Args: - request (google.cloud.compute_v1.types.DeleteFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.DeleteFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.Delete. See the method description for details. @@ -667,17 +679,17 @@ def delete( def get( self, - request: compute.GetFirewallPolicyRequest = None, + request: Union[compute.GetFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPolicy: r"""Returns the specified firewall policy. Args: - request (google.cloud.compute_v1.types.GetFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.GetFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.Get. See the method description for details. @@ -731,17 +743,17 @@ def get( def get_association( self, - request: compute.GetAssociationFirewallPolicyRequest = None, + request: Union[compute.GetAssociationFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPolicyAssociation: r"""Gets an association with the specified name. 
Args: - request (google.cloud.compute_v1.types.GetAssociationFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.GetAssociationFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.GetAssociation. See the method description for details. @@ -795,10 +807,10 @@ def get_association( def get_iam_policy( self, - request: compute.GetIamPolicyFirewallPolicyRequest = None, + request: Union[compute.GetIamPolicyFirewallPolicyRequest, dict] = None, *, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -806,7 +818,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.GetIamPolicy. See the method description for details. @@ -899,17 +911,17 @@ def get_iam_policy( def get_rule( self, - request: compute.GetRuleFirewallPolicyRequest = None, + request: Union[compute.GetRuleFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPolicyRule: r"""Gets a rule of the specified priority. Args: - request (google.cloud.compute_v1.types.GetRuleFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.GetRuleFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.GetRule. See the method description for details. @@ -967,10 +979,10 @@ def get_rule( def insert( self, - request: compute.InsertFirewallPolicyRequest = None, + request: Union[compute.InsertFirewallPolicyRequest, dict] = None, *, firewall_policy_resource: compute.FirewallPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -978,7 +990,7 @@ def insert( the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.InsertFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.Insert. See the method description for details. @@ -1045,9 +1057,9 @@ def insert( def list( self, - request: compute.ListFirewallPoliciesRequest = None, + request: Union[compute.ListFirewallPoliciesRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -1055,7 +1067,7 @@ def list( the specified folder or organization. Args: - request (google.cloud.compute_v1.types.ListFirewallPoliciesRequest): + request (Union[google.cloud.compute_v1.types.ListFirewallPoliciesRequest, dict]): The request object. A request message for FirewallPolicies.List. See the method description for details. 
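# --- Illustrative usage sketch, not part of the diff: with the request parameter
# widened to Union[<Request>, dict], a plain dict can be passed in place of the
# proto-plus message. The policy id is made up and application default credentials
# are assumed.
from google.cloud import compute_v1

client = compute_v1.FirewallPoliciesClient()

# These two calls are equivalent:
policy = client.get(request=compute_v1.GetFirewallPolicyRequest(firewall_policy="123456789"))
policy = client.get(request={"firewall_policy": "123456789"})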
@@ -1098,9 +1110,9 @@ def list( def list_associations( self, - request: compute.ListAssociationsFirewallPolicyRequest = None, + request: Union[compute.ListAssociationsFirewallPolicyRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPoliciesListAssociationsResponse: @@ -1108,7 +1120,7 @@ def list_associations( organization or folder. Args: - request (google.cloud.compute_v1.types.ListAssociationsFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.ListAssociationsFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.ListAssociations. See the method description for details. @@ -1142,17 +1154,17 @@ def list_associations( def move( self, - request: compute.MoveFirewallPolicyRequest = None, + request: Union[compute.MoveFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Moves the specified firewall policy. Args: - request (google.cloud.compute_v1.types.MoveFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.MoveFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.Move. See the method description for details. @@ -1221,11 +1233,11 @@ def move( def patch( self, - request: compute.PatchFirewallPolicyRequest = None, + request: Union[compute.PatchFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, firewall_policy_resource: compute.FirewallPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1233,7 +1245,7 @@ def patch( in the request. Args: - request (google.cloud.compute_v1.types.PatchFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.PatchFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.Patch. See the method description for details. @@ -1309,18 +1321,18 @@ def patch( def patch_rule( self, - request: compute.PatchRuleFirewallPolicyRequest = None, + request: Union[compute.PatchRuleFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, firewall_policy_rule_resource: compute.FirewallPolicyRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Patches a rule of the specified priority. Args: - request (google.cloud.compute_v1.types.PatchRuleFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.PatchRuleFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.PatchRule. See the method description for details. @@ -1396,10 +1408,10 @@ def patch_rule( def remove_association( self, - request: compute.RemoveAssociationFirewallPolicyRequest = None, + request: Union[compute.RemoveAssociationFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1407,7 +1419,7 @@ def remove_association( policy. 
Args: - request (google.cloud.compute_v1.types.RemoveAssociationFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.RemoveAssociationFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.RemoveAssociation. See the method description for details. @@ -1476,17 +1488,17 @@ def remove_association( def remove_rule( self, - request: compute.RemoveRuleFirewallPolicyRequest = None, + request: Union[compute.RemoveRuleFirewallPolicyRequest, dict] = None, *, firewall_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes a rule of the specified priority. Args: - request (google.cloud.compute_v1.types.RemoveRuleFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.RemoveRuleFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.RemoveRule. See the method description for details. @@ -1555,11 +1567,11 @@ def remove_rule( def set_iam_policy( self, - request: compute.SetIamPolicyFirewallPolicyRequest = None, + request: Union[compute.SetIamPolicyFirewallPolicyRequest, dict] = None, *, resource: str = None, global_organization_set_policy_request_resource: compute.GlobalOrganizationSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1567,7 +1579,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.SetIamPolicy. See the method description for details. @@ -1671,11 +1683,11 @@ def set_iam_policy( def test_iam_permissions( self, - request: compute.TestIamPermissionsFirewallPolicyRequest = None, + request: Union[compute.TestIamPermissionsFirewallPolicyRequest, dict] = None, *, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1683,7 +1695,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsFirewallPolicyRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsFirewallPolicyRequest, dict]): The request object. A request message for FirewallPolicies.TestIamPermissions. See the method description for details. @@ -1744,6 +1756,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/firewall_policies/pagers.py b/google/cloud/compute_v1/services/firewall_policies/pagers.py index df2a6b66c..a75f203df 100644 --- a/google/cloud/compute_v1/services/firewall_policies/pagers.py +++ b/google/cloud/compute_v1/services/firewall_policies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.FirewallPolicyList]: + def pages(self) -> Iterator[compute.FirewallPolicyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.FirewallPolicy]: + def __iter__(self) -> Iterator[compute.FirewallPolicy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/firewall_policies/transports/base.py b/google/cloud/compute_v1/services/firewall_policies/transports/base.py index 44b407fc5..00cebf8e1 100644 --- a/google/cloud/compute_v1/services/firewall_policies/transports/base.py +++ b/google/cloud/compute_v1/services/firewall_policies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class FirewallPoliciesTransport(abc.ABC): """Abstract transport class for FirewallPolicies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. 
+ # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -216,6 +180,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_association( self, diff --git a/google/cloud/compute_v1/services/firewall_policies/transports/rest.py b/google/cloud/compute_v1/services/firewall_policies/transports/rest.py index 1e637baf1..d74033c18 100644 --- a/google/cloud/compute_v1/services/firewall_policies/transports/rest.py +++ b/google/cloud/compute_v1/services/firewall_policies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + FirewallPoliciesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import FirewallPoliciesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class FirewallPoliciesRestTransport(FirewallPoliciesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_association( + def _add_association( self, request: compute.AddAssociationFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add association method over HTTP. @@ -112,6 +139,9 @@ def add_association( FirewallPolicies.AddAssociation. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,37 +165,61 @@ def add_association( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addAssociation", + "body": "firewall_policy_association_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.AddAssociationFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.FirewallPolicyAssociation.to_json( - request.firewall_policy_association_resource, + compute.FirewallPolicyAssociation(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addAssociation".format( - host=self._host, firewall_policy=request.firewall_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddAssociationFirewallPolicyRequest.to_json( + compute.AddAssociationFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.AddAssociationFirewallPolicyRequest.replace_existing_association - in request - ): - query_params[ - "replaceExistingAssociation" - ] = request.replace_existing_association - if compute.AddAssociationFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -176,10 +230,12 @@ def add_association( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def add_rule( + def _add_rule( self, request: compute.AddRuleFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add rule method over HTTP. @@ -190,6 +246,9 @@ def add_rule( FirewallPolicies.AddRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -213,30 +272,61 @@ def add_rule( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addRule", + "body": "firewall_policy_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.AddRuleFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.FirewallPolicyRule.to_json( - request.firewall_policy_rule_resource, + compute.FirewallPolicyRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addRule".format( - host=self._host, firewall_policy=request.firewall_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddRuleFirewallPolicyRequest.to_json( + compute.AddRuleFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddRuleFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -247,10 +337,12 @@ def add_rule( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def clone_rules( + def _clone_rules( self, request: compute.CloneRulesFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the clone rules method over HTTP. @@ -261,6 +353,9 @@ def clone_rules( FirewallPolicies.CloneRules. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
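The rewritten bodies above all follow one shape: an http_options table plus the request's fields (as a plain dict) go through path_template.transcode, which expands the URI template, splits out the named body field, and leaves everything else as query parameters. A minimal sketch of that step with hypothetical field values; the return keys shown are the ones the generated code reads back (method, uri, body, query_params):

    from google.api_core import path_template

    http_options = [
        {
            "method": "post",
            "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addRule",
            "body": "firewall_policy_rule_resource",
        },
    ]

    # Hypothetical request fields, already converted to a plain dict.
    request_kwargs = {
        "firewall_policy": "example-policy",
        "request_id": "example-request-id",
        "firewall_policy_rule_resource": {"priority": 1000},
    }

    transcoded = path_template.transcode(http_options, **request_kwargs)
    # transcoded["method"]       == "post"
    # transcoded["uri"]          == "/compute/v1/locations/global/firewallPolicies/example-policy/addRule"
    # transcoded["body"]         == {"priority": 1000}
    # transcoded["query_params"] == {"request_id": "example-request-id"}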
@@ -284,24 +379,54 @@ def clone_rules( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/cloneRules".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/cloneRules", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.CloneRulesFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.CloneRulesFirewallPolicyRequest.to_json( + compute.CloneRulesFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.CloneRulesFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id - if compute.CloneRulesFirewallPolicyRequest.source_firewall_policy in request: - query_params["sourceFirewallPolicy"] = request.source_firewall_policy + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -311,10 +436,12 @@ def clone_rules( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -325,6 +452,9 @@ def delete( FirewallPolicies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -348,22 +478,52 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.DeleteFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteFirewallPolicyRequest.to_json( + compute.DeleteFirewallPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -373,10 +533,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPolicy: r"""Call the get method over HTTP. @@ -387,6 +549,9 @@ def get( FirewallPolicies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
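transcode() hands back the leftover query parameters with snake_case names; the json.loads(...to_json(...)) round-trip in the bodies above re-wraps them in the request message so the proto JSON mapping emits the camelCase names the REST endpoint expects. That step in isolation, with a hypothetical request_id value (this assumes the serializer's default camelCase field names, which the surrounding code relies on):

    import json

    from google.cloud.compute_v1.types import compute

    snake_params = {"request_id": "example-request-id"}  # as produced by transcode()

    camel_params = json.loads(
        compute.DeleteFirewallPolicyRequest.to_json(
            compute.DeleteFirewallPolicyRequest(snake_params),
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )
    # camel_params == {"requestId": "example-request-id"}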
@@ -397,20 +562,52 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.GetFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetFirewallPolicyRequest.to_json( + compute.GetFirewallPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -422,10 +619,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_association( + def _get_association( self, request: compute.GetAssociationFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPolicyAssociation: r"""Call the get association method over HTTP. @@ -436,6 +635,9 @@ def get_association( FirewallPolicies.GetAssociation. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -444,22 +646,54 @@ def get_association( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getAssociation".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getAssociation", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.GetAssociationFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetAssociationFirewallPolicyRequest.to_json( + compute.GetAssociationFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetAssociationFirewallPolicyRequest.name in request: - query_params["name"] = request.name + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -471,10 +705,12 @@ def get_association( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -485,6 +721,9 @@ def get_iam_policy( FirewallPolicies.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
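Because including_default_value_fields=False omits fields that still hold their proto default, a required field can disappear from the camelCase params built above; the small loop that follows each to_json call restores it from the transcoded snake_case params. The loop in isolation, with hypothetical values:

    required_fields = [("firewall_policy", "firewallPolicy")]  # (snake_case_name, camel_case_name)

    # Hypothetical: the required field still holds its default value ...
    orig_query_params = {"firewall_policy": ""}
    # ... so the to_json round-trip dropped it from the camelCase params.
    query_params = {}

    for snake_case_name, camel_case_name in required_fields:
        if snake_case_name in orig_query_params:
            if camel_case_name not in query_params:
                query_params[camel_case_name] = orig_query_params[snake_case_name]

    # query_params == {"firewallPolicy": ""} -- the required key is still sent.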
@@ -533,27 +772,54 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{resource}/getIamPolicy".format( - host=self._host, resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/locations/global/firewallPolicies/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyFirewallPolicyRequest.to_json( + compute.GetIamPolicyFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyFirewallPolicyRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -563,10 +829,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def get_rule( + def _get_rule( self, request: compute.GetRuleFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPolicyRule: r"""Call the get rule method over HTTP. @@ -577,6 +845,9 @@ def get_rule( FirewallPolicies.GetRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -589,22 +860,54 @@ def get_rule( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getRule".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getRule", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.GetRuleFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRuleFirewallPolicyRequest.to_json( + compute.GetRuleFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetRuleFirewallPolicyRequest.priority in request: - query_params["priority"] = request.priority + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -616,10 +919,12 @@ def get_rule( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -630,6 +935,9 @@ def insert( FirewallPolicies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
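Before the request is sent, the camelCase params go through rest_helpers.flatten_query_params, which turns a possibly nested dict into the flat (name, value) pairs a query string can carry, repeating the name for list values. A small sketch with made-up values (top-level Compute query params are usually already flat, so this mostly matters for nested or repeated fields):

    from google.api_core import rest_helpers

    params = {"filter": "name = allow-ssh", "labels": {"values": ["a", "b"]}}
    rest_helpers.flatten_query_params(params)
    # -> [("filter", "name = allow-ssh"), ("labels.values", "a"), ("labels.values", "b")]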
@@ -653,32 +961,58 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies", + "body": "firewall_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ] + + request_kwargs = compute.InsertFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.FirewallPolicy.to_json( - request.firewall_policy_resource, + compute.FirewallPolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies".format( - host=self._host, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertFirewallPolicyRequest.to_json( + compute.InsertFirewallPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertFirewallPolicyRequest.parent_id in request: - query_params["parentId"] = request.parent_id - if compute.InsertFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -689,10 +1023,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListFirewallPoliciesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPolicyList: r"""Call the list method over HTTP. @@ -703,6 +1039,9 @@ def list( FirewallPolicies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -711,32 +1050,35 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies".format( - host=self._host, - ) + http_options = [ + {"method": "get", "uri": "/compute/v1/locations/global/firewallPolicies",}, + ] + + request_kwargs = compute.ListFirewallPoliciesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListFirewallPoliciesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListFirewallPoliciesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListFirewallPoliciesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListFirewallPoliciesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListFirewallPoliciesRequest.parent_id in request: - query_params["parentId"] = request.parent_id - if compute.ListFirewallPoliciesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Jsonify the query params + query_params = json.loads( + compute.ListFirewallPoliciesRequest.to_json( + compute.ListFirewallPoliciesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -748,10 +1090,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_associations( + def _list_associations( self, request: compute.ListAssociationsFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallPoliciesListAssociationsResponse: r"""Call the list associations method over HTTP. @@ -762,6 +1106,9 @@ def list_associations( FirewallPolicies.ListAssociations. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -770,22 +1117,40 @@ def list_associations( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/listAssociations".format( - host=self._host, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/locations/global/firewallPolicies/listAssociations", + }, + ] + + request_kwargs = compute.ListAssociationsFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListAssociationsFirewallPolicyRequest.to_json( + compute.ListAssociationsFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListAssociationsFirewallPolicyRequest.target_resource in request: - query_params["targetResource"] = request.target_resource - # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -797,10 +1162,12 @@ def list_associations( response.content, ignore_unknown_fields=True ) - def move( + def _move( self, request: compute.MoveFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the move method over HTTP. @@ -811,6 +1178,9 @@ def move( FirewallPolicies.Move. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -834,24 +1204,52 @@ def move( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/move".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/move", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.MoveFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.MoveFirewallPolicyRequest.to_json( + compute.MoveFirewallPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.MoveFirewallPolicyRequest.parent_id in request: - query_params["parentId"] = request.parent_id - if compute.MoveFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -861,10 +1259,12 @@ def move( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def patch( + def _patch( self, request: compute.PatchFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -875,6 +1275,9 @@ def patch( FirewallPolicies.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -898,30 +1301,59 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}", + "body": "firewall_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.PatchFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.FirewallPolicy.to_json( - request.firewall_policy_resource, + compute.FirewallPolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}".format( - host=self._host, firewall_policy=request.firewall_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchFirewallPolicyRequest.to_json( + compute.PatchFirewallPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -932,10 +1364,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def patch_rule( + def _patch_rule( self, request: compute.PatchRuleFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch rule method over HTTP. @@ -946,6 +1380,9 @@ def patch_rule( FirewallPolicies.PatchRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -969,32 +1406,61 @@ def patch_rule( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/patchRule", + "body": "firewall_policy_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.PatchRuleFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.FirewallPolicyRule.to_json( - request.firewall_policy_rule_resource, + compute.FirewallPolicyRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/patchRule".format( - host=self._host, firewall_policy=request.firewall_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRuleFirewallPolicyRequest.to_json( + compute.PatchRuleFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRuleFirewallPolicyRequest.priority in request: - query_params["priority"] = request.priority - if compute.PatchRuleFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1005,10 +1471,12 @@ def patch_rule( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def remove_association( + def _remove_association( self, request: compute.RemoveAssociationFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove association method over HTTP. @@ -1019,6 +1487,9 @@ def remove_association( FirewallPolicies.RemoveAssociation. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -1042,24 +1513,54 @@ def remove_association( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeAssociation".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeAssociation", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.RemoveAssociationFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveAssociationFirewallPolicyRequest.to_json( + compute.RemoveAssociationFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveAssociationFirewallPolicyRequest.name in request: - query_params["name"] = request.name - if compute.RemoveAssociationFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1069,10 +1570,12 @@ def remove_association( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def remove_rule( + def _remove_rule( self, request: compute.RemoveRuleFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove rule method over HTTP. @@ -1083,6 +1586,9 @@ def remove_rule( FirewallPolicies.RemoveRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -1106,24 +1612,54 @@ def remove_rule( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeRule".format( - host=self._host, firewall_policy=request.firewall_policy, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeRule", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall_policy", "firewallPolicy"), + ] + + request_kwargs = compute.RemoveRuleFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveRuleFirewallPolicyRequest.to_json( + compute.RemoveRuleFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveRuleFirewallPolicyRequest.priority in request: - query_params["priority"] = request.priority - if compute.RemoveRuleFirewallPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1133,10 +1669,12 @@ def remove_rule( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -1147,6 +1685,9 @@ def set_iam_policy( FirewallPolicies.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -1195,28 +1736,61 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{resource}/setIamPolicy", + "body": "global_organization_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyFirewallPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalOrganizationSetPolicyRequest.to_json( - request.global_organization_set_policy_request_resource, + compute.GlobalOrganizationSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{resource}/setIamPolicy".format( - host=self._host, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyFirewallPolicyRequest.to_json( + compute.SetIamPolicyFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1227,10 +1801,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsFirewallPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -1241,6 +1817,9 @@ def test_iam_permissions( FirewallPolicies.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
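The remaining hunks below rename each public call method to a leading-underscore implementation and expose it again through a read-only property, matching the Callable-returning properties declared on the abstract FirewallPoliciesTransport base; callers are unaffected. A minimal analogue of the indirection (hypothetical class, not part of the library):

    class _ExampleTransport:
        def _get(self, request):
            """Private implementation that actually issues the HTTP call."""
            ...

        @property
        def get(self):
            # transport.get(request) still works; the property just returns the
            # private callable, matching the property-based interface on the base transport.
            return self._get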
@@ -1249,28 +1828,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/locations/global/firewallPolicies/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsFirewallPolicyRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/firewallPolicies/{resource}/testIamPermissions".format( - host=self._host, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsFirewallPolicyRequest.to_json( + compute.TestIamPermissionsFirewallPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1283,5 +1897,122 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def add_association( + self, + ) -> Callable[[compute.AddAssociationFirewallPolicyRequest], compute.Operation]: + return self._add_association + + @property + def add_rule( + self, + ) -> Callable[[compute.AddRuleFirewallPolicyRequest], compute.Operation]: + return self._add_rule + + @property + def clone_rules( + self, + ) -> Callable[[compute.CloneRulesFirewallPolicyRequest], compute.Operation]: + return self._clone_rules + + @property + def delete( + self, + ) -> Callable[[compute.DeleteFirewallPolicyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetFirewallPolicyRequest], compute.FirewallPolicy]: + return self._get + + @property + def get_association( + self, + ) -> Callable[ + [compute.GetAssociationFirewallPolicyRequest], compute.FirewallPolicyAssociation + ]: + return self._get_association + + @property + def get_iam_policy( + self, + ) -> 
Callable[[compute.GetIamPolicyFirewallPolicyRequest], compute.Policy]: + return self._get_iam_policy + + @property + def get_rule( + self, + ) -> Callable[[compute.GetRuleFirewallPolicyRequest], compute.FirewallPolicyRule]: + return self._get_rule + + @property + def insert( + self, + ) -> Callable[[compute.InsertFirewallPolicyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListFirewallPoliciesRequest], compute.FirewallPolicyList]: + return self._list + + @property + def list_associations( + self, + ) -> Callable[ + [compute.ListAssociationsFirewallPolicyRequest], + compute.FirewallPoliciesListAssociationsResponse, + ]: + return self._list_associations + + @property + def move(self) -> Callable[[compute.MoveFirewallPolicyRequest], compute.Operation]: + return self._move + + @property + def patch( + self, + ) -> Callable[[compute.PatchFirewallPolicyRequest], compute.Operation]: + return self._patch + + @property + def patch_rule( + self, + ) -> Callable[[compute.PatchRuleFirewallPolicyRequest], compute.Operation]: + return self._patch_rule + + @property + def remove_association( + self, + ) -> Callable[[compute.RemoveAssociationFirewallPolicyRequest], compute.Operation]: + return self._remove_association + + @property + def remove_rule( + self, + ) -> Callable[[compute.RemoveRuleFirewallPolicyRequest], compute.Operation]: + return self._remove_rule + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyFirewallPolicyRequest], compute.Policy]: + return self._set_iam_policy + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsFirewallPolicyRequest], + compute.TestPermissionsResponse, + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("FirewallPoliciesRestTransport",) diff --git a/google/cloud/compute_v1/services/firewalls/client.py b/google/cloud/compute_v1/services/firewalls/client.py index 03348650b..6af9f0a46 100644 --- a/google/cloud/compute_v1/services/firewalls/client.py +++ b/google/cloud/compute_v1/services/firewalls/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.firewalls import pagers from google.cloud.compute_v1.types import compute from .transports.base import FirewallsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,22 +335,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteFirewallRequest = None, + request: Union[compute.DeleteFirewallRequest, dict] = None, *, project: str = None, firewall: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified firewall. Args: - request (google.cloud.compute_v1.types.DeleteFirewallRequest): + request (Union[google.cloud.compute_v1.types.DeleteFirewallRequest, dict]): The request object. A request message for Firewalls.Delete. See the method description for details. @@ -413,18 +425,18 @@ def delete( def get( self, - request: compute.GetFirewallRequest = None, + request: Union[compute.GetFirewallRequest, dict] = None, *, project: str = None, firewall: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Firewall: r"""Returns the specified firewall. Args: - request (google.cloud.compute_v1.types.GetFirewallRequest): + request (Union[google.cloud.compute_v1.types.GetFirewallRequest, dict]): The request object. A request message for Firewalls.Get. See the method description for details. 
project (str): @@ -487,11 +499,11 @@ def get( def insert( self, - request: compute.InsertFirewallRequest = None, + request: Union[compute.InsertFirewallRequest, dict] = None, *, project: str = None, firewall_resource: compute.Firewall = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -499,7 +511,7 @@ def insert( using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertFirewallRequest): + request (Union[google.cloud.compute_v1.types.InsertFirewallRequest, dict]): The request object. A request message for Firewalls.Insert. See the method description for details. @@ -573,10 +585,10 @@ def insert( def list( self, - request: compute.ListFirewallsRequest = None, + request: Union[compute.ListFirewallsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -584,7 +596,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListFirewallsRequest): + request (Union[google.cloud.compute_v1.types.ListFirewallsRequest, dict]): The request object. A request message for Firewalls.List. See the method description for details. project (str): @@ -645,12 +657,12 @@ def list( def patch( self, - request: compute.PatchFirewallRequest = None, + request: Union[compute.PatchFirewallRequest, dict] = None, *, project: str = None, firewall: str = None, firewall_resource: compute.Firewall = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -660,7 +672,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchFirewallRequest): + request (Union[google.cloud.compute_v1.types.PatchFirewallRequest, dict]): The request object. A request message for Firewalls.Patch. See the method description for details. project (str): @@ -740,12 +752,12 @@ def patch( def update( self, - request: compute.UpdateFirewallRequest = None, + request: Union[compute.UpdateFirewallRequest, dict] = None, *, project: str = None, firewall: str = None, firewall_resource: compute.Firewall = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -756,7 +768,7 @@ def update( instead. Args: - request (google.cloud.compute_v1.types.UpdateFirewallRequest): + request (Union[google.cloud.compute_v1.types.UpdateFirewallRequest, dict]): The request object. A request message for Firewalls.Update. See the method description for details. @@ -835,6 +847,19 @@ def update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/firewalls/pagers.py b/google/cloud/compute_v1/services/firewalls/pagers.py index 675df6778..6a719f9e0 100644 --- a/google/cloud/compute_v1/services/firewalls/pagers.py +++ b/google/cloud/compute_v1/services/firewalls/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.FirewallList]: + def pages(self) -> Iterator[compute.FirewallList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Firewall]: + def __iter__(self) -> Iterator[compute.Firewall]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/firewalls/transports/base.py b/google/cloud/compute_v1/services/firewalls/transports/base.py index b57a2648b..61a7d95be 100644 --- a/google/cloud/compute_v1/services/firewalls/transports/base.py +++ b/google/cloud/compute_v1/services/firewalls/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class FirewallsTransport(abc.ABC): """Abstract transport class for Firewalls.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -178,6 +142,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/firewalls/transports/rest.py b/google/cloud/compute_v1/services/firewalls/transports/rest.py index db25c5c0a..1a29627db 100644 --- a/google/cloud/compute_v1/services/firewalls/transports/rest.py +++ b/google/cloud/compute_v1/services/firewalls/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import FirewallsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import FirewallsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class FirewallsRestTransport(FirewallsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteFirewallRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +136,9 @@ def delete( Firewalls.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
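The __enter__/__exit__ pair added to firewalls/client.py above makes the client usable as a context manager, with the exit hook closing the underlying transport (the close() that base.py now requires every transport to implement). A usage sketch, assuming application default credentials are available; the project name is a placeholder:

from google.cloud import compute_v1

# Leaving the block calls client.transport.close(); avoid this pattern if
# the transport is shared with other clients.
with compute_v1.FirewallsClient() as client:
    for firewall in client.list(project="example-project"):
        print(firewall.name)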
@@ -135,22 +162,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/firewalls/{firewall}".format( - host=self._host, project=request.project, firewall=request.firewall, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/firewalls/{firewall}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall", "firewall"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteFirewallRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteFirewallRequest.to_json( + compute.DeleteFirewallRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteFirewallRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +218,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetFirewallRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Firewall: r"""Call the get method over HTTP. @@ -173,6 +233,9 @@ def get( The request object. A request message for Firewalls.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
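The OptionalRetry alias guarded by try/except at the top of the new rest.py (and in the rewritten clients) is there because gapic_v1.method._MethodDefault, the type of the DEFAULT sentinel, only exists in newer google-api-core releases; older versions fall back to a plain object so the annotation still resolves. A stripped-down version of the shim and of a signature that uses it; example_call is a placeholder, not part of the library:

from typing import Union

from google.api_core import gapic_v1
from google.api_core import retry as retries

try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # older google-api-core without _MethodDefault
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


def example_call(retry: OptionalRetry = gapic_v1.method.DEFAULT) -> None:
    # A real handler would forward `retry` to the wrapped method; DEFAULT
    # means "use the retry policy configured for this method".
    pass


example_call()                                     # DEFAULT sentinel
example_call(retry=retries.Retry(deadline=30.0))   # explicit policy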
@@ -186,20 +249,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/firewalls/{firewall}".format( - host=self._host, project=request.project, firewall=request.firewall, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/firewalls/{firewall}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall", "firewall"), + ("project", "project"), + ] + + request_kwargs = compute.GetFirewallRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetFirewallRequest.to_json( + compute.GetFirewallRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -209,10 +305,12 @@ def get( # Return the response return compute.Firewall.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertFirewallRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -223,6 +321,9 @@ def insert( Firewalls.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
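With the verb now coming out of transcode, the handlers dispatch on the session dynamically via getattr(self._session, method) instead of calling session.get/post/delete directly, and the URI is still prefixed with a hardcoded https:// even though the constructor grew a url_scheme parameter (the in-line comment about proper scheme configuration flags exactly that). A small sketch of the dispatch with placeholder host and path values, stopping short of actually sending a request:

import requests

session = requests.Session()

# Stand-ins for transcoded_request["method"], ["uri"], and the transport host.
method = "delete"
uri = "/compute/v1/projects/example-project/global/firewalls/example-firewall"
host = "compute.googleapis.com"

send = getattr(session, method)  # resolves to session.delete
url = "https://{host}{uri}".format(host=host, uri=uri)

# send(url, timeout=30.0, headers={"Content-Type": "application/json"})
# would perform the call; note the scheme is still fixed to https here.
print(send.__name__, url)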
@@ -246,30 +347,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/firewalls", + "body": "firewall_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertFirewallRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Firewall.to_json( - request.firewall_resource, + compute.Firewall(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/firewalls".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertFirewallRequest.to_json( + compute.InsertFirewallRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertFirewallRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -280,10 +410,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListFirewallsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.FirewallList: r"""Call the list method over HTTP. @@ -293,6 +425,9 @@ def list( The request object. A request message for Firewalls.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -301,30 +436,52 @@ def list( Contains a list of firewalls. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/firewalls".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/firewalls", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListFirewallsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListFirewallsRequest.to_json( + compute.ListFirewallsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListFirewallsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListFirewallsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListFirewallsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListFirewallsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListFirewallsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -336,10 +493,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchFirewallRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -350,6 +509,9 @@ def patch( Firewalls.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -373,30 +535,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/firewalls/{firewall}", + "body": "firewall_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall", "firewall"), + ("project", "project"), + ] + + request_kwargs = compute.PatchFirewallRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Firewall.to_json( - request.firewall_resource, + compute.Firewall(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/firewalls/{firewall}".format( - host=self._host, project=request.project, firewall=request.firewall, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchFirewallRequest.to_json( + compute.PatchFirewallRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchFirewallRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -407,10 +599,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateFirewallRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -421,6 +615,9 @@ def update( Firewalls.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -444,30 +641,60 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/global/firewalls/{firewall}", + "body": "firewall_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("firewall", "firewall"), + ("project", "project"), + ] + + request_kwargs = compute.UpdateFirewallRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Firewall.to_json( - request.firewall_resource, + compute.Firewall(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/firewalls/{firewall}".format( - host=self._host, project=request.project, firewall=request.firewall, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateFirewallRequest.to_json( + compute.UpdateFirewallRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateFirewallRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -478,5 +705,32 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete(self) -> Callable[[compute.DeleteFirewallRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetFirewallRequest], compute.Firewall]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertFirewallRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListFirewallsRequest], compute.FirewallList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchFirewallRequest], compute.Operation]: + return self._patch + + @property + def update(self) -> Callable[[compute.UpdateFirewallRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("FirewallsRestTransport",) diff --git a/google/cloud/compute_v1/services/forwarding_rules/client.py b/google/cloud/compute_v1/services/forwarding_rules/client.py index 
d2360addc..463175b72 100644 --- a/google/cloud/compute_v1/services/forwarding_rules/client.py +++ b/google/cloud/compute_v1/services/forwarding_rules/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.forwarding_rules import pagers from google.cloud.compute_v1.types import compute from .transports.base import ForwardingRulesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,21 +337,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListForwardingRulesRequest = None, + request: Union[compute.AggregatedListForwardingRulesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of forwarding rules. Args: - request (google.cloud.compute_v1.types.AggregatedListForwardingRulesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListForwardingRulesRequest, dict]): The request object. A request message for ForwardingRules.AggregatedList. See the method description for details. 
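Replacing distutils.util.strtobool in the client constructors with an explicit membership check also tightens what the variable accepts: only the literal strings "true" and "false" pass, whereas strtobool also tolerated values such as "1", "yes", or "on". The check in isolation:

import os

# Only "true" and "false" are accepted now; anything else raises ValueError.
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"

value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
if value not in ("true", "false"):
    raise ValueError(
        "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
        "either `true` or `false`"
    )
use_client_cert = value == "true"
print(use_client_cert)  # True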
@@ -401,19 +413,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteForwardingRuleRequest = None, + request: Union[compute.DeleteForwardingRuleRequest, dict] = None, *, project: str = None, region: str = None, forwarding_rule: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified ForwardingRule resource. Args: - request (google.cloud.compute_v1.types.DeleteForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.DeleteForwardingRuleRequest, dict]): The request object. A request message for ForwardingRules.Delete. See the method description for details. @@ -498,19 +510,19 @@ def delete( def get( self, - request: compute.GetForwardingRuleRequest = None, + request: Union[compute.GetForwardingRuleRequest, dict] = None, *, project: str = None, region: str = None, forwarding_rule: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ForwardingRule: r"""Returns the specified ForwardingRule resource. Args: - request (google.cloud.compute_v1.types.GetForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.GetForwardingRuleRequest, dict]): The request object. A request message for ForwardingRules.Get. See the method description for details. @@ -592,12 +604,12 @@ def get( def insert( self, - request: compute.InsertForwardingRuleRequest = None, + request: Union[compute.InsertForwardingRuleRequest, dict] = None, *, project: str = None, region: str = None, forwarding_rule_resource: compute.ForwardingRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -606,7 +618,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.InsertForwardingRuleRequest, dict]): The request object. A request message for ForwardingRules.Insert. See the method description for details. @@ -689,11 +701,11 @@ def insert( def list( self, - request: compute.ListForwardingRulesRequest = None, + request: Union[compute.ListForwardingRulesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -701,7 +713,7 @@ def list( available to the specified project and region. Args: - request (google.cloud.compute_v1.types.ListForwardingRulesRequest): + request (Union[google.cloud.compute_v1.types.ListForwardingRulesRequest, dict]): The request object. A request message for ForwardingRules.List. See the method description for details. 
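The request annotations above now read Union[<Request>, dict]: proto-plus lets the generated clients accept a plain mapping and coerce it into the typed request message. A call sketch with placeholder project, region, and rule names (real credentials and resources are needed for it to succeed):

from google.cloud import compute_v1

client = compute_v1.ForwardingRulesClient()

rule = client.get(
    request={
        "project": "example-project",
        "region": "us-central1",
        "forwarding_rule": "example-rule",
    }
)
print(rule.name)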
@@ -773,13 +785,13 @@ def list( def patch( self, - request: compute.PatchForwardingRuleRequest = None, + request: Union[compute.PatchForwardingRuleRequest, dict] = None, *, project: str = None, region: str = None, forwarding_rule: str = None, forwarding_rule_resource: compute.ForwardingRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -789,7 +801,7 @@ def patch( only patch the network_tier field. Args: - request (google.cloud.compute_v1.types.PatchForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.PatchForwardingRuleRequest, dict]): The request object. A request message for ForwardingRules.Patch. See the method description for details. @@ -883,13 +895,13 @@ def patch( def set_labels( self, - request: compute.SetLabelsForwardingRuleRequest = None, + request: Union[compute.SetLabelsForwardingRuleRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_labels_request_resource: compute.RegionSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -898,7 +910,7 @@ def set_labels( documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsForwardingRuleRequest, dict]): The request object. A request message for ForwardingRules.SetLabels. See the method description for details. @@ -992,13 +1004,13 @@ def set_labels( def set_target( self, - request: compute.SetTargetForwardingRuleRequest = None, + request: Union[compute.SetTargetForwardingRuleRequest, dict] = None, *, project: str = None, region: str = None, forwarding_rule: str = None, target_reference_resource: compute.TargetReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1006,7 +1018,7 @@ def set_target( target should be of the same type as the old target. Args: - request (google.cloud.compute_v1.types.SetTargetForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.SetTargetForwardingRuleRequest, dict]): The request object. A request message for ForwardingRules.SetTarget. See the method description for details. @@ -1098,6 +1110,19 @@ def set_target( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/forwarding_rules/pagers.py b/google/cloud/compute_v1/services/forwarding_rules/pagers.py index 87af6c676..af571023d 100644 --- a/google/cloud/compute_v1/services/forwarding_rules/pagers.py +++ b/google/cloud/compute_v1/services/forwarding_rules/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ForwardingRuleAggregatedList]: + def pages(self) -> Iterator[compute.ForwardingRuleAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.ForwardingRulesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.ForwardingRulesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ForwardingRuleList]: + def pages(self) -> Iterator[compute.ForwardingRuleList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ForwardingRule]: + def __iter__(self) -> Iterator[compute.ForwardingRule]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/forwarding_rules/transports/base.py b/google/cloud/compute_v1/services/forwarding_rules/transports/base.py index f8af0c13a..e396bfb0b 100644 --- a/google/cloud/compute_v1/services/forwarding_rules/transports/base.py +++ b/google/cloud/compute_v1/services/forwarding_rules/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except 
pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ForwardingRulesTransport(abc.ABC): """Abstract transport class for ForwardingRules.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -184,6 +148,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py b/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py index 0b20495b6..1fe785a2a 100644 --- a/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py +++ b/google/cloud/compute_v1/services/forwarding_rules/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + ForwardingRulesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import ForwardingRulesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ForwardingRulesRestTransport(ForwardingRulesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListForwardingRulesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ForwardingRuleAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( ForwardingRules.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
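The pager annotations switched from Iterable to Iterator, which matches how they behave: iterating an AggregatedList pager yields (scope, scoped_list) pairs and fetches further pages lazily, while a plain List pager yields the resources themselves. A sketch with placeholder names, assuming credentials are available:

from google.cloud import compute_v1

client = compute_v1.ForwardingRulesClient()

# Each item is a (scope, ForwardingRulesScopedList) pair, for example
# ("regions/us-central1", <scoped list>); pages are fetched on demand.
for scope, scoped_list in client.aggregated_list(project="example-project"):
    for rule in scoped_list.forwarding_rules:
        print(scope, rule.name)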
@@ -120,35 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/forwardingRules".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/forwardingRules", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListForwardingRulesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListForwardingRulesRequest.to_json( + compute.AggregatedListForwardingRulesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListForwardingRulesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListForwardingRulesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListForwardingRulesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListForwardingRulesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListForwardingRulesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListForwardingRulesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( ForwardingRules.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,25 +251,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( - host=self._host, - project=request.project, - region=request.region, - forwarding_rule=request.forwarding_rule, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteForwardingRuleRequest.to_json( + compute.DeleteForwardingRuleRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +308,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ForwardingRule: r"""Call the get method over HTTP. @@ -239,6 +324,9 @@ def get( ForwardingRules.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
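The rewritten handlers no longer hand-build URLs with `str.format`; they describe the route declaratively in `http_options` and let `google.api_core.path_template.transcode` split a request dict into the HTTP verb, the expanded URI, and the leftover query parameters. A standalone sketch under that assumption, with a hypothetical project and page size:

from google.api_core import path_template

http_options = [
    {
        "method": "get",
        "uri": "/compute/v1/projects/{project}/aggregated/forwardingRules",
    },
]

# Fields named in the URI template are substituted into the path; everything
# else is returned under "query_params".
transcoded = path_template.transcode(
    http_options, project="example-project", max_results=10
)

print(transcoded["method"])        # get
print(transcoded["uri"])           # /compute/v1/projects/example-project/aggregated/forwardingRules
print(transcoded["query_params"])  # {'max_results': 10}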
@@ -259,23 +347,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( - host=self._host, - project=request.project, - region=request.region, - forwarding_rule=request.forwarding_rule, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetForwardingRuleRequest.to_json( + compute.GetForwardingRuleRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -287,10 +406,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -301,6 +422,9 @@ def insert( ForwardingRules.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
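Each handler repeats a small loop that copies required fields back into `query_params`. It exists because serializing with `including_default_value_fields=False` drops any required field that happens to hold its proto3 default (for example an empty string). A library-free sketch of just that step, with hypothetical values:

# Values a handler would have at this point (hypothetical).
transcoded_query_params = {"project": "example-project", "region": ""}
query_params = {"project": "example-project"}  # "region" was dropped: "" is the default

required_fields = [
    # (snake_case_name, camel_case_name)
    ("project", "project"),
    ("region", "region"),
]

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in transcoded_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = transcoded_query_params[snake_case_name]

print(query_params)  # {'project': 'example-project', 'region': ''}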
@@ -324,30 +448,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/forwardingRules", + "body": "forwarding_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ForwardingRule.to_json( - request.forwarding_rule_resource, + compute.ForwardingRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/forwardingRules".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertForwardingRuleRequest.to_json( + compute.InsertForwardingRuleRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -358,10 +512,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListForwardingRulesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ForwardingRuleList: r"""Call the list method over HTTP. @@ -372,6 +528,9 @@ def list( ForwardingRules.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
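Rather than hard-coding `self._session.get`/`post`/`delete` per method, the handlers now look the verb returned by the transcoder up on the session at call time, and query parameters go through `google.api_core.rest_helpers.flatten_query_params` before being handed to `requests`. A sketch with a plain `requests.Session` standing in for the `AuthorizedSession`; the host, URI, and parameters are hypothetical, and the actual call is left commented out because it needs credentials.

import requests
from google.api_core import rest_helpers

session = requests.Session()
host = "compute.googleapis.com"

# Values the transcoder would have produced (hypothetical).
method = "get"
uri = "/compute/v1/projects/example-project/global/addresses"
query_params = {"maxResults": 10, "returnPartialSuccess": True}

send = getattr(session, method)  # session.get, session.post, ...
params = rest_helpers.flatten_query_params(query_params)
print(params)  # a flat list of (key, value) pairs suitable for `requests`

# send("https://{host}{uri}".format(host=host, uri=uri), params=params)
# would issue the request; authentication headers are omitted in this sketch.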
@@ -382,30 +541,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/forwardingRules".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/forwardingRules", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListForwardingRulesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListForwardingRulesRequest.to_json( + compute.ListForwardingRulesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListForwardingRulesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListForwardingRulesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListForwardingRulesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListForwardingRulesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListForwardingRulesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -417,10 +599,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -431,6 +615,9 @@ def patch( ForwardingRules.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -454,33 +641,61 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}", + "body": "forwarding_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ForwardingRule.to_json( - request.forwarding_rule_resource, + compute.ForwardingRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}".format( - host=self._host, - project=request.project, - region=request.region, - forwarding_rule=request.forwarding_rule, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchForwardingRuleRequest.to_json( + compute.PatchForwardingRuleRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -491,10 +706,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_labels( + def _set_labels( self, request: compute.SetLabelsForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -505,6 +722,9 @@ def set_labels( ForwardingRules.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -528,33 +748,63 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels", + "body": "region_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetLabelsForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetLabelsRequest.to_json( - request.region_set_labels_request_resource, + compute.RegionSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsForwardingRuleRequest.to_json( + compute.SetLabelsForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetLabelsForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -565,10 +815,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_target( + def _set_target( self, request: compute.SetTargetForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set target method over HTTP. @@ -579,6 +831,9 @@ def set_target( ForwardingRules.SetTarget. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
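The mutating handlers (insert, patch, setLabels, setTarget) build the HTTP body by rehydrating the transcoded body mapping into its proto-plus message and serializing it back to JSON, so only explicitly set fields are sent. A sketch with hypothetical field values:

from google.cloud.compute_v1.types import compute

# What transcoded_request["body"] might look like (hypothetical values).
body_mapping = {"name": "example-rule", "description": "sketch only"}

body = compute.ForwardingRule.to_json(
    compute.ForwardingRule(body_mapping),
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
print(body)  # a JSON string containing only the fields set above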
@@ -602,33 +857,63 @@ def set_target( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}/setTarget", + "body": "target_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.SetTargetForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetReference.to_json( - request.target_reference_resource, + compute.TargetReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}/setTarget".format( - host=self._host, - project=request.project, - region=request.region, - forwarding_rule=request.forwarding_rule, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetTargetForwardingRuleRequest.to_json( + compute.SetTargetForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetTargetForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -639,5 +924,59 @@ def set_target( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListForwardingRulesRequest], + compute.ForwardingRuleAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteForwardingRuleRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetForwardingRuleRequest], compute.ForwardingRule]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertForwardingRuleRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListForwardingRulesRequest], compute.ForwardingRuleList]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchForwardingRuleRequest], compute.Operation]: + return self._patch + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsForwardingRuleRequest], compute.Operation]: + return self._set_labels + + @property + def set_target( + self, + ) -> Callable[[compute.SetTargetForwardingRuleRequest], compute.Operation]: + return self._set_target + + def close(self): + self._session.close() + __all__ = ("ForwardingRulesRestTransport",) diff --git a/google/cloud/compute_v1/services/global_addresses/client.py b/google/cloud/compute_v1/services/global_addresses/client.py index fdd0d2831..e2a10b61c 100644 --- a/google/cloud/compute_v1/services/global_addresses/client.py +++ b/google/cloud/compute_v1/services/global_addresses/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.global_addresses import pagers from google.cloud.compute_v1.types import compute from .transports.base import GlobalAddressesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,22 +337,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteGlobalAddressRequest = None, + request: Union[compute.DeleteGlobalAddressRequest, dict] = None, *, project: str = None, address: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified address resource. Args: - request (google.cloud.compute_v1.types.DeleteGlobalAddressRequest): + request (Union[google.cloud.compute_v1.types.DeleteGlobalAddressRequest, dict]): The request object. A request message for GlobalAddresses.Delete. See the method description for details. @@ -417,11 +429,11 @@ def delete( def get( self, - request: compute.GetGlobalAddressRequest = None, + request: Union[compute.GetGlobalAddressRequest, dict] = None, *, project: str = None, address: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Address: @@ -429,7 +441,7 @@ def get( of available addresses by making a list() request. Args: - request (google.cloud.compute_v1.types.GetGlobalAddressRequest): + request (Union[google.cloud.compute_v1.types.GetGlobalAddressRequest, dict]): The request object. 
A request message for GlobalAddresses.Get. See the method description for details. @@ -497,11 +509,11 @@ def get( def insert( self, - request: compute.InsertGlobalAddressRequest = None, + request: Union[compute.InsertGlobalAddressRequest, dict] = None, *, project: str = None, address_resource: compute.Address = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -509,7 +521,7 @@ def insert( by using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertGlobalAddressRequest): + request (Union[google.cloud.compute_v1.types.InsertGlobalAddressRequest, dict]): The request object. A request message for GlobalAddresses.Insert. See the method description for details. @@ -583,17 +595,17 @@ def insert( def list( self, - request: compute.ListGlobalAddressesRequest = None, + request: Union[compute.ListGlobalAddressesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: r"""Retrieves a list of global addresses. Args: - request (google.cloud.compute_v1.types.ListGlobalAddressesRequest): + request (Union[google.cloud.compute_v1.types.ListGlobalAddressesRequest, dict]): The request object. A request message for GlobalAddresses.List. See the method description for details. @@ -653,6 +665,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/global_addresses/pagers.py b/google/cloud/compute_v1/services/global_addresses/pagers.py index b5d65118e..81630f15e 100644 --- a/google/cloud/compute_v1/services/global_addresses/pagers.py +++ b/google/cloud/compute_v1/services/global_addresses/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.AddressList]: + def pages(self) -> Iterator[compute.AddressList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Address]: + def __iter__(self) -> Iterator[compute.Address]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/global_addresses/transports/base.py b/google/cloud/compute_v1/services/global_addresses/transports/base.py index d380c32be..cf6f3d91e 100644 --- a/google/cloud/compute_v1/services/global_addresses/transports/base.py +++ b/google/cloud/compute_v1/services/global_addresses/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class GlobalAddressesTransport(abc.ABC): """Abstract transport class for GlobalAddresses.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/global_addresses/transports/rest.py b/google/cloud/compute_v1/services/global_addresses/transports/rest.py index 4d4ee1fda..1ea1a1ce4 100644 --- a/google/cloud/compute_v1/services/global_addresses/transports/rest.py +++ b/google/cloud/compute_v1/services/global_addresses/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + GlobalAddressesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import GlobalAddressesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class GlobalAddressesRestTransport(GlobalAddressesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteGlobalAddressRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( GlobalAddresses.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
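With `__enter__`/`__exit__` on the clients and `close()` on the transports, a client can now be used as a context manager that releases its HTTP session on exit. A hedged usage sketch, assuming application-default credentials and a hypothetical project ID:

from google.cloud import compute_v1

with compute_v1.GlobalAddressesClient() as client:
    # Exiting the block calls client.transport.close(); only do this when the
    # transport is not shared with other clients.
    for address in client.list(project="example-project"):
        print(address.name)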
@@ -135,22 +165,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/addresses/{address}".format( - host=self._host, project=request.project, address=request.address, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/addresses/{address}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("address", "address"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteGlobalAddressRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteGlobalAddressRequest.to_json( + compute.DeleteGlobalAddressRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteGlobalAddressRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +221,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetGlobalAddressRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Address: r"""Call the get method over HTTP. @@ -174,6 +237,9 @@ def get( GlobalAddresses.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -189,20 +255,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/addresses/{address}".format( - host=self._host, project=request.project, address=request.address, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/addresses/{address}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("address", "address"), + ("project", "project"), + ] + + request_kwargs = compute.GetGlobalAddressRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetGlobalAddressRequest.to_json( + compute.GetGlobalAddressRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -212,10 +311,12 @@ def get( # Return the response return compute.Address.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertGlobalAddressRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -226,6 +327,9 @@ def insert( GlobalAddresses.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -249,30 +353,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/addresses", + "body": "address_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertGlobalAddressRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Address.to_json( - request.address_resource, + compute.Address(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/addresses".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertGlobalAddressRequest.to_json( + compute.InsertGlobalAddressRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertGlobalAddressRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -283,10 +416,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListGlobalAddressesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.AddressList: r"""Call the list method over HTTP. @@ -297,6 +432,9 @@ def list( GlobalAddresses.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -305,30 +443,52 @@ def list( Contains a list of addresses. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/addresses".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/addresses", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListGlobalAddressesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListGlobalAddressesRequest.to_json( + compute.ListGlobalAddressesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListGlobalAddressesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListGlobalAddressesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListGlobalAddressesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListGlobalAddressesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListGlobalAddressesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -340,5 +500,30 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[[compute.DeleteGlobalAddressRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetGlobalAddressRequest], compute.Address]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertGlobalAddressRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListGlobalAddressesRequest], compute.AddressList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("GlobalAddressesRestTransport",) diff --git a/google/cloud/compute_v1/services/global_forwarding_rules/client.py b/google/cloud/compute_v1/services/global_forwarding_rules/client.py index 494823484..6f06bc09a 100644 --- a/google/cloud/compute_v1/services/global_forwarding_rules/client.py +++ b/google/cloud/compute_v1/services/global_forwarding_rules/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.global_forwarding_rules import pagers from google.cloud.compute_v1.types import compute from .transports.base import GlobalForwardingRulesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,22 +339,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteGlobalForwardingRuleRequest = None, + request: Union[compute.DeleteGlobalForwardingRuleRequest, dict] = None, *, project: str = None, forwarding_rule: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified GlobalForwardingRule resource. Args: - request (google.cloud.compute_v1.types.DeleteGlobalForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.DeleteGlobalForwardingRuleRequest, dict]): The request object. A request message for GlobalForwardingRules.Delete. See the method description for details. @@ -419,11 +431,11 @@ def delete( def get( self, - request: compute.GetGlobalForwardingRuleRequest = None, + request: Union[compute.GetGlobalForwardingRuleRequest, dict] = None, *, project: str = None, forwarding_rule: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ForwardingRule: @@ -432,7 +444,7 @@ def get( list() request. Args: - request (google.cloud.compute_v1.types.GetGlobalForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.GetGlobalForwardingRuleRequest, dict]): The request object. A request message for GlobalForwardingRules.Get. See the method description for details. @@ -505,11 +517,11 @@ def get( def insert( self, - request: compute.InsertGlobalForwardingRuleRequest = None, + request: Union[compute.InsertGlobalForwardingRuleRequest, dict] = None, *, project: str = None, forwarding_rule_resource: compute.ForwardingRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -518,7 +530,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertGlobalForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.InsertGlobalForwardingRuleRequest, dict]): The request object. A request message for GlobalForwardingRules.Insert. See the method description for details. @@ -592,10 +604,10 @@ def insert( def list( self, - request: compute.ListGlobalForwardingRulesRequest = None, + request: Union[compute.ListGlobalForwardingRulesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -603,7 +615,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListGlobalForwardingRulesRequest): + request (Union[google.cloud.compute_v1.types.ListGlobalForwardingRulesRequest, dict]): The request object. 
A request message for GlobalForwardingRules.List. See the method description for details. @@ -666,12 +678,12 @@ def list( def patch( self, - request: compute.PatchGlobalForwardingRuleRequest = None, + request: Union[compute.PatchGlobalForwardingRuleRequest, dict] = None, *, project: str = None, forwarding_rule: str = None, forwarding_rule_resource: compute.ForwardingRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -681,7 +693,7 @@ def patch( only patch the network_tier field. Args: - request (google.cloud.compute_v1.types.PatchGlobalForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.PatchGlobalForwardingRuleRequest, dict]): The request object. A request message for GlobalForwardingRules.Patch. See the method description for details. @@ -764,12 +776,12 @@ def patch( def set_labels( self, - request: compute.SetLabelsGlobalForwardingRuleRequest = None, + request: Union[compute.SetLabelsGlobalForwardingRuleRequest, dict] = None, *, project: str = None, resource: str = None, global_set_labels_request_resource: compute.GlobalSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -778,7 +790,7 @@ def set_labels( documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsGlobalForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsGlobalForwardingRuleRequest, dict]): The request object. A request message for GlobalForwardingRules.SetLabels. See the method description for details. @@ -865,12 +877,12 @@ def set_labels( def set_target( self, - request: compute.SetTargetGlobalForwardingRuleRequest = None, + request: Union[compute.SetTargetGlobalForwardingRuleRequest, dict] = None, *, project: str = None, forwarding_rule: str = None, target_reference_resource: compute.TargetReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -879,7 +891,7 @@ def set_target( the old target. Args: - request (google.cloud.compute_v1.types.SetTargetGlobalForwardingRuleRequest): + request (Union[google.cloud.compute_v1.types.SetTargetGlobalForwardingRuleRequest, dict]): The request object. A request message for GlobalForwardingRules.SetTarget. See the method description for details. @@ -962,6 +974,19 @@ def set_target( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/global_forwarding_rules/pagers.py b/google/cloud/compute_v1/services/global_forwarding_rules/pagers.py index e5019cd68..c2648e19b 100644 --- a/google/cloud/compute_v1/services/global_forwarding_rules/pagers.py +++ b/google/cloud/compute_v1/services/global_forwarding_rules/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ForwardingRuleList]: + def pages(self) -> Iterator[compute.ForwardingRuleList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ForwardingRule]: + def __iter__(self) -> Iterator[compute.ForwardingRule]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/global_forwarding_rules/transports/base.py b/google/cloud/compute_v1/services/global_forwarding_rules/transports/base.py index e61272413..8ee16c772 100644 --- a/google/cloud/compute_v1/services/global_forwarding_rules/transports/base.py +++ b/google/cloud/compute_v1/services/global_forwarding_rules/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class GlobalForwardingRulesTransport(abc.ABC): """Abstract transport class for GlobalForwardingRules.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. 
+ # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -181,6 +145,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py b/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py index 37b741d9f..29c2fd124 100644 --- a/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py +++ b/google/cloud/compute_v1/services/global_forwarding_rules/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + GlobalForwardingRulesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import GlobalForwardingRulesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class GlobalForwardingRulesRestTransport(GlobalForwardingRulesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteGlobalForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( GlobalForwardingRules.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
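The constructor above gains a url_scheme parameter; a minimal, hedged sketch of constructing the REST transport directly with it (host, scheme, and anonymous credentials are illustrative only, and note the request URL assembly below still hard-codes "https" per the in-code comment):

from google.auth import credentials as ga_credentials

from google.cloud.compute_v1.services.global_forwarding_rules.transports.rest import (
    GlobalForwardingRulesRestTransport,
)

# Anonymous credentials avoid resolving Application Default Credentials in a sketch.
transport = GlobalForwardingRulesRestTransport(
    host="localhost:8080",
    url_scheme="http",
    credentials=ga_credentials.AnonymousCredentials(),
)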
@@ -135,24 +165,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}".format( - host=self._host, - project=request.project, - forwarding_rule=request.forwarding_rule, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteGlobalForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteGlobalForwardingRuleRequest.to_json( + compute.DeleteGlobalForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteGlobalForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -162,10 +223,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetGlobalForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ForwardingRule: r"""Call the get method over HTTP. @@ -176,6 +239,9 @@ def get( GlobalForwardingRules.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
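A hedged illustration of the path_template.transcode step used in the handler above (all values invented): fields named in the URI template are substituted into the path, and the remaining request fields come back as query params.

from google.api_core import path_template

http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}",
    },
]

transcoded = path_template.transcode(
    http_options, project="my-project", forwarding_rule="my-rule", request_id="abc-123"
)
# transcoded["method"]       -> "delete"
# transcoded["uri"]          -> "/compute/v1/projects/my-project/global/forwardingRules/my-rule"
# transcoded["query_params"] -> {"request_id": "abc-123"}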
@@ -196,22 +262,55 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}".format( - host=self._host, - project=request.project, - forwarding_rule=request.forwarding_rule, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ] + + request_kwargs = compute.GetGlobalForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetGlobalForwardingRuleRequest.to_json( + compute.GetGlobalForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -223,10 +322,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertGlobalForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -237,6 +338,9 @@ def insert( GlobalForwardingRules.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
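The required-field backfill loop above guards against to_json() (called with including_default_value_fields=False) dropping a required param that still holds its proto default; a standalone sketch of the mechanism with assumed values:

# Suppose "project" was dropped from the JSON query params because it
# serialized as a default; path_template.transcode preserved it, so the
# loop copies it back under its camelCase name.
query_params = {}
orig_query_params = {"project": ""}
required_fields = [
    # (snake_case_name, camel_case_name)
    ("forwarding_rule", "forwardingRule"),
    ("project", "project"),
]

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"project": ""}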
@@ -260,30 +364,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/forwardingRules", + "body": "forwarding_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertGlobalForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ForwardingRule.to_json( - request.forwarding_rule_resource, + compute.ForwardingRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/forwardingRules".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertGlobalForwardingRuleRequest.to_json( + compute.InsertGlobalForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertGlobalForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -294,10 +429,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListGlobalForwardingRulesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ForwardingRuleList: r"""Call the list method over HTTP. @@ -308,6 +445,9 @@ def list( GlobalForwardingRules.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
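For methods that carry a resource (insert, patch, set_labels, set_target), the HTTP body is produced by serializing the proto-plus message pulled out of the transcoded request; a small sketch with placeholder field values:

from google.cloud.compute_v1.types import compute

rule = compute.ForwardingRule(
    name="my-rule",
    target="projects/my-project/global/targetHttpProxies/my-proxy",
)
body = compute.ForwardingRule.to_json(
    rule,
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
# body is a JSON string such as '{"name": "my-rule", "target": "projects/..."}'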
@@ -318,30 +458,54 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/forwardingRules".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/forwardingRules", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListGlobalForwardingRulesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListGlobalForwardingRulesRequest.to_json( + compute.ListGlobalForwardingRulesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListGlobalForwardingRulesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListGlobalForwardingRulesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListGlobalForwardingRulesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListGlobalForwardingRulesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListGlobalForwardingRulesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -353,10 +517,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchGlobalForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -367,6 +533,9 @@ def patch( GlobalForwardingRules.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
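The query params assembled above are passed through rest_helpers.flatten_query_params before being handed to the requests session; a hedged sketch of that flattening (input values invented):

from google.api_core import rest_helpers

flattened = rest_helpers.flatten_query_params(
    {"filter": "name=my-rule", "pageToken": "abc-123"}
)
# Roughly [("filter", "name=my-rule"), ("pageToken", "abc-123")]; nested dicts
# are flattened to dotted keys and lists become repeated keys.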
@@ -390,32 +559,62 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}", + "body": "forwarding_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ] + + request_kwargs = compute.PatchGlobalForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ForwardingRule.to_json( - request.forwarding_rule_resource, + compute.ForwardingRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}".format( - host=self._host, - project=request.project, - forwarding_rule=request.forwarding_rule, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchGlobalForwardingRuleRequest.to_json( + compute.PatchGlobalForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchGlobalForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -426,10 +625,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_labels( + def _set_labels( self, request: compute.SetLabelsGlobalForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -440,6 +641,9 @@ def set_labels( GlobalForwardingRules.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
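On the client side, the signature changes earlier in this diff mean patch() (like the other methods) now accepts either the request message or an equivalent dict; a hedged usage sketch with placeholder identifiers:

from google.cloud import compute_v1

client = compute_v1.GlobalForwardingRulesClient()
operation = client.patch(
    request={
        "project": "my-project",
        "forwarding_rule": "my-rule",
        "forwarding_rule_resource": {"network_tier": "STANDARD"},
    }
)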
@@ -463,28 +667,62 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/forwardingRules/{resource}/setLabels", + "body": "global_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetLabelsGlobalForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetLabelsRequest.to_json( - request.global_set_labels_request_resource, + compute.GlobalSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/forwardingRules/{resource}/setLabels".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsGlobalForwardingRuleRequest.to_json( + compute.SetLabelsGlobalForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -495,10 +733,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_target( + def _set_target( self, request: compute.SetTargetGlobalForwardingRuleRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set target method over HTTP. @@ -509,6 +749,9 @@ def set_target( GlobalForwardingRules.SetTarget. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
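Similarly, a hedged sketch of calling set_labels through the client (project, resource name, fingerprint, and labels are placeholders); the GlobalSetLabelsRequest carries the labels map plus the current label fingerprint:

from google.cloud import compute_v1

client = compute_v1.GlobalForwardingRulesClient()
operation = client.set_labels(
    project="my-project",
    resource="my-rule",
    global_set_labels_request_resource=compute_v1.GlobalSetLabelsRequest(
        label_fingerprint="42WmSpB8rSM=",
        labels={"env": "prod"},
    ),
)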
@@ -532,32 +775,62 @@ def set_target( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}/setTarget", + "body": "target_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("forwarding_rule", "forwardingRule"), + ("project", "project"), + ] + + request_kwargs = compute.SetTargetGlobalForwardingRuleRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetReference.to_json( - request.target_reference_resource, + compute.TargetReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}/setTarget".format( - host=self._host, - project=request.project, - forwarding_rule=request.forwarding_rule, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetTargetGlobalForwardingRuleRequest.to_json( + compute.SetTargetGlobalForwardingRuleRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetTargetGlobalForwardingRuleRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -568,5 +841,52 @@ def set_target( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteGlobalForwardingRuleRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetGlobalForwardingRuleRequest], compute.ForwardingRule]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertGlobalForwardingRuleRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListGlobalForwardingRulesRequest], compute.ForwardingRuleList + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchGlobalForwardingRuleRequest], compute.Operation]: + return self._patch + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsGlobalForwardingRuleRequest], compute.Operation]: + return self._set_labels + + @property + def set_target( + self, + ) -> Callable[[compute.SetTargetGlobalForwardingRuleRequest], compute.Operation]: + return self._set_target + + def close(self): + self._session.close() + __all__ = ("GlobalForwardingRulesRestTransport",) diff --git a/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py b/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py index 2670eb7fe..25adf124c 100644 --- a/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py +++ b/google/cloud/compute_v1/services/global_network_endpoint_groups/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.global_network_endpoint_groups import pagers from google.cloud.compute_v1.types import compute from .transports.base import GlobalNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO @@ -267,8 +271,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -330,16 +341,19 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def attach_network_endpoints( self, - request: compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest = None, + request: Union[ + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict + ] = None, *, project: str = None, network_endpoint_group: str = None, global_network_endpoint_groups_attach_endpoints_request_resource: compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -347,7 +361,7 @@ def attach_network_endpoints( endpoint group. Args: - request (google.cloud.compute_v1.types.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]): The request object. A request message for GlobalNetworkEndpointGroups.AttachNetworkEndpoints. See the method description for details. 
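The distutils.util.strtobool call is replaced above by an explicit check; restated standalone (grounded directly in this diff), only the literal strings "true" and "false" are accepted:

import os

value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
if value not in ("true", "false"):
    raise ValueError(
        "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
    )
use_client_cert = value == "true"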
@@ -447,11 +461,11 @@ def attach_network_endpoints( def delete( self, - request: compute.DeleteGlobalNetworkEndpointGroupRequest = None, + request: Union[compute.DeleteGlobalNetworkEndpointGroupRequest, dict] = None, *, project: str = None, network_endpoint_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -460,7 +474,7 @@ def delete( services referencing it. Args: - request (google.cloud.compute_v1.types.DeleteGlobalNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.DeleteGlobalNetworkEndpointGroupRequest, dict]): The request object. A request message for GlobalNetworkEndpointGroups.Delete. See the method description for details. @@ -537,12 +551,14 @@ def delete( def detach_network_endpoints( self, - request: compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest = None, + request: Union[ + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict + ] = None, *, project: str = None, network_endpoint_group: str = None, global_network_endpoint_groups_detach_endpoints_request_resource: compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -550,7 +566,7 @@ def detach_network_endpoints( network endpoint group. Args: - request (google.cloud.compute_v1.types.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, dict]): The request object. A request message for GlobalNetworkEndpointGroups.DetachNetworkEndpoints. See the method description for details. @@ -650,11 +666,11 @@ def detach_network_endpoints( def get( self, - request: compute.GetGlobalNetworkEndpointGroupRequest = None, + request: Union[compute.GetGlobalNetworkEndpointGroupRequest, dict] = None, *, project: str = None, network_endpoint_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroup: @@ -663,7 +679,7 @@ def get( list() request. Args: - request (google.cloud.compute_v1.types.GetGlobalNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.GetGlobalNetworkEndpointGroupRequest, dict]): The request object. A request message for GlobalNetworkEndpointGroups.Get. See the method description for details. @@ -735,11 +751,11 @@ def get( def insert( self, - request: compute.InsertGlobalNetworkEndpointGroupRequest = None, + request: Union[compute.InsertGlobalNetworkEndpointGroupRequest, dict] = None, *, project: str = None, network_endpoint_group_resource: compute.NetworkEndpointGroup = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -748,7 +764,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertGlobalNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.InsertGlobalNetworkEndpointGroupRequest, dict]): The request object. A request message for GlobalNetworkEndpointGroups.Insert. See the method description for details. 
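The retry parameter on these methods is now typed via the OptionalRetry alias defined near the top of each client module; a hedged sketch of passing an explicit retry and timeout (the predicate and values are illustrative only):

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import compute_v1

client = compute_v1.GlobalNetworkEndpointGroupsClient()
neg = client.get(
    project="my-project",
    network_endpoint_group="my-neg",
    retry=retries.Retry(
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    ),
    timeout=30.0,
)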
@@ -824,10 +840,10 @@ def insert( def list( self, - request: compute.ListGlobalNetworkEndpointGroupsRequest = None, + request: Union[compute.ListGlobalNetworkEndpointGroupsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -835,7 +851,7 @@ def list( are located in the specified project. Args: - request (google.cloud.compute_v1.types.ListGlobalNetworkEndpointGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListGlobalNetworkEndpointGroupsRequest, dict]): The request object. A request message for GlobalNetworkEndpointGroups.List. See the method description for details. @@ -896,11 +912,13 @@ def list( def list_network_endpoints( self, - request: compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest = None, + request: Union[ + compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, dict + ] = None, *, project: str = None, network_endpoint_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListNetworkEndpointsPager: @@ -908,7 +926,7 @@ def list_network_endpoints( endpoint group. Args: - request (google.cloud.compute_v1.types.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, dict]): The request object. A request message for GlobalNetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. @@ -982,6 +1000,19 @@ def list_network_endpoints( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/global_network_endpoint_groups/pagers.py b/google/cloud/compute_v1/services/global_network_endpoint_groups/pagers.py index a7ccd93c7..703fbead1 100644 --- a/google/cloud/compute_v1/services/global_network_endpoint_groups/pagers.py +++ b/google/cloud/compute_v1/services/global_network_endpoint_groups/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NetworkEndpointGroupList]: + def pages(self) -> Iterator[compute.NetworkEndpointGroupList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NetworkEndpointGroup]: + def __iter__(self) -> Iterator[compute.NetworkEndpointGroup]: for page in self.pages: yield from page.items @@ -138,14 +138,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NetworkEndpointGroupsListNetworkEndpoints]: + def pages(self) -> Iterator[compute.NetworkEndpointGroupsListNetworkEndpoints]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NetworkEndpointWithHealthStatus]: + def __iter__(self) -> Iterator[compute.NetworkEndpointWithHealthStatus]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/base.py b/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/base.py index 70115faff..65f6bc2a7 100644 --- a/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/base.py +++ b/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: 
# try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class GlobalNetworkEndpointGroupsTransport(abc.ABC): """Abstract transport class for GlobalNetworkEndpointGroups.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -187,6 +151,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def attach_network_endpoints( self, diff --git a/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py b/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py index 696b006b4..d76eb669d 100644 --- a/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py +++ b/google/cloud/compute_v1/services/global_network_endpoint_groups/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + GlobalNetworkEndpointGroupsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import GlobalNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class GlobalNetworkEndpointGroupsRestTransport(GlobalNetworkEndpointGroupsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def attach_network_endpoints( + def _attach_network_endpoints( self, request: compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the attach network endpoints method over HTTP. @@ -112,6 +139,9 @@ def attach_network_endpoints( GlobalNetworkEndpointGroups.AttachNetworkEndpoints. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,35 +165,66 @@ def attach_network_endpoints( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints", + "body": "global_network_endpoint_groups_attach_endpoints_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ] + + request_kwargs = compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest.to_json( - request.global_network_endpoint_groups_attach_endpoints_request_resource, + compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints".format( - host=self._host, - project=request.project, - network_endpoint_group=request.network_endpoint_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.to_json( + compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -174,10 +235,12 @@ def attach_network_endpoints( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -188,6 +251,9 @@ def delete( GlobalNetworkEndpointGroups.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -211,24 +277,57 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}".format( - host=self._host, - project=request.project, - network_endpoint_group=request.network_endpoint_group, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteGlobalNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteGlobalNetworkEndpointGroupRequest.to_json( + compute.DeleteGlobalNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteGlobalNetworkEndpointGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -238,10 +337,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def detach_network_endpoints( + def _detach_network_endpoints( self, request: compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the detach network endpoints method over HTTP. @@ -252,6 +353,9 @@ def detach_network_endpoints( GlobalNetworkEndpointGroups.DetachNetworkEndpoints. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -275,35 +379,66 @@ def detach_network_endpoints( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints", + "body": "global_network_endpoint_groups_detach_endpoints_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ] + + request_kwargs = compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest.to_json( - request.global_network_endpoint_groups_detach_endpoints_request_resource, + compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints".format( - host=self._host, - project=request.project, - network_endpoint_group=request.network_endpoint_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.to_json( + compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - 
query_params = {} - if ( - compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -314,10 +449,12 @@ def detach_network_endpoints( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroup: r"""Call the get method over HTTP. @@ -328,6 +465,9 @@ def get( GlobalNetworkEndpointGroups.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -347,22 +487,55 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}".format( - host=self._host, - project=request.project, - network_endpoint_group=request.network_endpoint_group, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ] + + request_kwargs = compute.GetGlobalNetworkEndpointGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetGlobalNetworkEndpointGroupRequest.to_json( + compute.GetGlobalNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -374,10 +547,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertGlobalNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -388,6 +563,9 @@ def insert( GlobalNetworkEndpointGroups.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -411,30 +589,63 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups", + "body": "network_endpoint_group_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertGlobalNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworkEndpointGroup.to_json( - request.network_endpoint_group_resource, + compute.NetworkEndpointGroup(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networkEndpointGroups".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertGlobalNetworkEndpointGroupRequest.to_json( + compute.InsertGlobalNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertGlobalNetworkEndpointGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -445,10 +656,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListGlobalNetworkEndpointGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroupList: r"""Call the list method over HTTP. @@ -459,6 +672,9 @@ def list( GlobalNetworkEndpointGroups.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -467,33 +683,54 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networkEndpointGroups".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListGlobalNetworkEndpointGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListGlobalNetworkEndpointGroupsRequest.to_json( + compute.ListGlobalNetworkEndpointGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListGlobalNetworkEndpointGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListGlobalNetworkEndpointGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListGlobalNetworkEndpointGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListGlobalNetworkEndpointGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListGlobalNetworkEndpointGroupsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -505,10 +742,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_network_endpoints( + def _list_network_endpoints( self, request: compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: r"""Call the list network endpoints method over HTTP. @@ -519,6 +758,9 @@ def list_network_endpoints( GlobalNetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -527,47 +769,57 @@ def list_network_endpoints( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints".format( - host=self._host, - project=request.project, - network_endpoint_group=request.network_endpoint_group, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ] + + request_kwargs = compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.to_json( + compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.filter - in request - ): - query_params["filter"] = request.filter - if ( - compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.max_results - in request - ): - query_params["maxResults"] = request.max_results - if ( - 
compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.order_by - in request - ): - query_params["orderBy"] = request.order_by - if ( - compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.page_token - in request - ): - query_params["pageToken"] = request.page_token - if ( - compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -579,5 +831,64 @@ def list_network_endpoints( response.content, ignore_unknown_fields=True ) + @property + def attach_network_endpoints( + self, + ) -> Callable[ + [compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest], + compute.Operation, + ]: + return self._attach_network_endpoints + + @property + def delete( + self, + ) -> Callable[[compute.DeleteGlobalNetworkEndpointGroupRequest], compute.Operation]: + return self._delete + + @property + def detach_network_endpoints( + self, + ) -> Callable[ + [compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest], + compute.Operation, + ]: + return self._detach_network_endpoints + + @property + def get( + self, + ) -> Callable[ + [compute.GetGlobalNetworkEndpointGroupRequest], compute.NetworkEndpointGroup + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertGlobalNetworkEndpointGroupRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListGlobalNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupList, + ]: + return self._list + + @property + def list_network_endpoints( + self, + ) -> Callable[ + [compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupsListNetworkEndpoints, + ]: + return self._list_network_endpoints + + def close(self): + self._session.close() + __all__ = ("GlobalNetworkEndpointGroupsRestTransport",) diff --git a/google/cloud/compute_v1/services/global_operations/client.py b/google/cloud/compute_v1/services/global_operations/client.py index 379011f97..cb943eb6c 100644 --- a/google/cloud/compute_v1/services/global_operations/client.py +++ b/google/cloud/compute_v1/services/global_operations/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
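The reworked REST transport renames each handler to a private `_method` and re-exposes it through a typed `@property` (see the block of property definitions above), so existing call sites such as `transport.delete(request)` keep working while the public surface now documents the exact request/response callable types. A minimal, self-contained sketch of that pattern, with toy names standing in for the generated classes:

```python
from typing import Callable


class FakeOperation:
    """Toy stand-in for compute.Operation in this sketch."""

    def __init__(self, name: str) -> None:
        self.name = name


class SketchRestTransport:
    """Illustrates exposing private handlers through typed properties."""

    def _delete(self, request: dict) -> FakeOperation:
        # A real handler would transcode `request` and issue the HTTP call here.
        return FakeOperation(name="operation-" + request["resource"])

    @property
    def delete(self) -> Callable[[dict], FakeOperation]:
        # Callers still write `transport.delete(request)`; the property simply
        # hands back the bound private method.
        return self._delete


transport = SketchRestTransport()
print(transport.delete({"resource": "neg-1"}).name)  # -> operation-neg-1
```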
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.global_operations import pagers from google.cloud.compute_v1.types import compute from .transports.base import GlobalOperationsTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,21 +337,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListGlobalOperationsRequest = None, + request: Union[compute.AggregatedListGlobalOperationsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of all operations. Args: - request (google.cloud.compute_v1.types.AggregatedListGlobalOperationsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListGlobalOperationsRequest, dict]): The request object. A request message for GlobalOperations.AggregatedList. See the method description for details. @@ -401,18 +413,18 @@ def aggregated_list( def delete( self, - request: compute.DeleteGlobalOperationRequest = None, + request: Union[compute.DeleteGlobalOperationRequest, dict] = None, *, project: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteGlobalOperationResponse: r"""Deletes the specified Operations resource. 
Args: - request (google.cloud.compute_v1.types.DeleteGlobalOperationRequest): + request (Union[google.cloud.compute_v1.types.DeleteGlobalOperationRequest, dict]): The request object. A request message for GlobalOperations.Delete. See the method description for details. @@ -476,18 +488,18 @@ def delete( def get( self, - request: compute.GetGlobalOperationRequest = None, + request: Union[compute.GetGlobalOperationRequest, dict] = None, *, project: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Retrieves the specified Operations resource. Args: - request (google.cloud.compute_v1.types.GetGlobalOperationRequest): + request (Union[google.cloud.compute_v1.types.GetGlobalOperationRequest, dict]): The request object. A request message for GlobalOperations.Get. See the method description for details. @@ -563,10 +575,10 @@ def get( def list( self, - request: compute.ListGlobalOperationsRequest = None, + request: Union[compute.ListGlobalOperationsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -574,7 +586,7 @@ def list( within the specified project. Args: - request (google.cloud.compute_v1.types.ListGlobalOperationsRequest): + request (Union[google.cloud.compute_v1.types.ListGlobalOperationsRequest, dict]): The request object. A request message for GlobalOperations.List. See the method description for details. @@ -637,11 +649,11 @@ def list( def wait( self, - request: compute.WaitGlobalOperationRequest = None, + request: Union[compute.WaitGlobalOperationRequest, dict] = None, *, project: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -660,7 +672,7 @@ def wait( ``DONE``. Args: - request (google.cloud.compute_v1.types.WaitGlobalOperationRequest): + request (Union[google.cloud.compute_v1.types.WaitGlobalOperationRequest, dict]): The request object. A request message for GlobalOperations.Wait. See the method description for details. @@ -734,6 +746,19 @@ def wait( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/global_operations/pagers.py b/google/cloud/compute_v1/services/global_operations/pagers.py index 3c37f2037..153773259 100644 --- a/google/cloud/compute_v1/services/global_operations/pagers.py +++ b/google/cloud/compute_v1/services/global_operations/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.OperationAggregatedList]: + def pages(self) -> Iterator[compute.OperationAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.OperationsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.OperationsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.OperationList]: + def pages(self) -> Iterator[compute.OperationList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Operation]: + def __iter__(self) -> Iterator[compute.Operation]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/global_operations/transports/base.py b/google/cloud/compute_v1/services/global_operations/transports/base.py index ad8a387a5..3756ed05c 100644 --- a/google/cloud/compute_v1/services/global_operations/transports/base.py +++ b/google/cloud/compute_v1/services/global_operations/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: 
NO COVER - _GOOGLE_AUTH_VERSION = None - class GlobalOperationsTransport(abc.ABC): """Abstract transport class for GlobalOperations.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/global_operations/transports/rest.py b/google/cloud/compute_v1/services/global_operations/transports/rest.py index f51fc90b6..40c3fa731 100644 --- a/google/cloud/compute_v1/services/global_operations/transports/rest.py +++ b/google/cloud/compute_v1/services/global_operations/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + GlobalOperationsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import GlobalOperationsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class GlobalOperationsRestTransport(GlobalOperationsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListGlobalOperationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.OperationAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( GlobalOperations.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
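The `OptionalRetry` alias added at the top of each rewritten module is a compatibility shim: on current google-api-core it includes `gapic_v1.method._MethodDefault` so the `DEFAULT` sentinel type-checks, and on very old releases that lack that attribute it falls back to `object` so the annotation still imports. In practice the per-call `retry` argument accepts either an explicit `Retry` object or the `DEFAULT` sentinel; a hedged usage sketch follows (the retry policy below is an arbitrary example, not a recommended configuration):

```python
# Either of these values is a valid argument for the `retry` kwarg on the
# generated methods; the predicate/backoff numbers are illustrative only.
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries

custom_retry = retries.Retry(
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    initial=1.0,      # first backoff, in seconds
    maximum=32.0,     # cap on the backoff
    multiplier=2.0,
)

for retry_arg in (custom_retry, gapic_v1.method.DEFAULT):
    print(type(retry_arg).__name__)
```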
@@ -120,35 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/operations".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/operations", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListGlobalOperationsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListGlobalOperationsRequest.to_json( + compute.AggregatedListGlobalOperationsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListGlobalOperationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListGlobalOperationsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListGlobalOperationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListGlobalOperationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListGlobalOperationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListGlobalOperationsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteGlobalOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteGlobalOperationResponse: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( GlobalOperations.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -185,20 +239,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/operations/{operation}".format( - host=self._host, project=request.project, operation=request.operation, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteGlobalOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteGlobalOperationRequest.to_json( + compute.DeleteGlobalOperationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -210,10 +299,12 @@ def delete( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetGlobalOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the get method over HTTP. @@ -224,6 +315,9 @@ def get( GlobalOperations.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
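The repeated "Ensure required fields have values in query_params" block exists because `to_json(..., including_default_value_fields=False)` omits any field whose value equals the proto default, which can silently drop a required query parameter. A self-contained sketch of that restore step, with plain dicts standing in for the transcoded request and the JSON round trip:

```python
# Plain-dict sketch of the "restore required query params" loop used by every
# reworked handler. The empty string below equals the proto default, so it is
# lost in the to_json()/json.loads() round trip and must be copied back.
required_fields = [
    # (snake_case_name, camel_case_name)
    ("network_endpoint_group", "networkEndpointGroup"),
    ("project", "project"),
]

# What path_template.transcode() produced (still holds every field)...
orig_query_params = {"project": "demo-project", "network_endpoint_group": ""}
# ...versus what survived the JSON round trip.
query_params = {"project": "demo-project"}

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

print(query_params)  # {'project': 'demo-project', 'networkEndpointGroup': ''}
```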
@@ -247,20 +341,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/operations/{operation}".format( - host=self._host, project=request.project, operation=request.operation, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ] + + request_kwargs = compute.GetGlobalOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetGlobalOperationRequest.to_json( + compute.GetGlobalOperationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -270,10 +397,12 @@ def get( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListGlobalOperationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.OperationList: r"""Call the list method over HTTP. @@ -284,6 +413,9 @@ def list( GlobalOperations.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
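`path_template.transcode()` replaces the hand-written URL formatting: it matches the request against the `http_options` rule, expands the `{placeholders}` in the URI from request fields, splits off the body field named by the rule, and returns whatever is left as query parameters. A deliberately simplified imitation of that behavior for the single-rule case (an illustrative stand-in, not the google-api-core implementation; it skips camelCase conversion and error handling):

```python
import re


def sketch_transcode(http_options, **request_kwargs):
    """Toy version of path_template.transcode for one simple rule."""
    rule = http_options[0]
    fields = dict(request_kwargs)

    # Expand {placeholder} segments in the URI from request fields.
    def expand(match):
        return str(fields.pop(match.group(1)))

    uri = re.sub(r"\{(\w+)\}", expand, rule["uri"])
    body = fields.pop(rule["body"], None) if "body" in rule else None

    return {
        "method": rule["method"],
        "uri": uri,
        "body": body,
        "query_params": fields,  # everything not consumed by the path or body
    }


# Mirrors the delete rule shown earlier for global network endpoint groups.
http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}",
    },
]
print(
    sketch_transcode(
        http_options,
        project="demo-project",
        network_endpoint_group="neg-1",
        request_id="abc-123",
    )
)
```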
@@ -294,30 +426,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/operations".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/operations", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListGlobalOperationsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListGlobalOperationsRequest.to_json( + compute.ListGlobalOperationsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListGlobalOperationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListGlobalOperationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListGlobalOperationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListGlobalOperationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListGlobalOperationsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -329,10 +483,12 @@ def list( response.content, ignore_unknown_fields=True ) - def wait( + def _wait( self, request: compute.WaitGlobalOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the wait method over HTTP. @@ -343,6 +499,9 @@ def wait( GlobalOperations.Wait. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
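After transcoding, a handler no longer hard-codes `self._session.get(...)`: the verb is looked up with `getattr()`, and `rest_helpers.flatten_query_params()` turns the JSON-shaped params into key/value pairs so list-valued fields become repeated query keys. A rough sketch of that dispatch using a plain `requests.Session`; `toy_flatten` is a stand-in for the real helper and the httpbin host is only for illustration:

```python
import requests


def toy_flatten(params):
    """Stand-in for rest_helpers.flatten_query_params: lists become repeats."""
    pairs = []
    for key, value in params.items():
        if isinstance(value, (list, tuple)):
            pairs.extend((key, item) for item in value)
        else:
            pairs.append((key, value))
    return pairs


session = requests.Session()
transcoded = {"method": "get", "uri": "/get"}
query_params = {"project": "demo-project", "fields": ["name", "status"]}

response = getattr(session, transcoded["method"])(
    "https://{host}{uri}".format(host="httpbin.org", uri=transcoded["uri"]),
    timeout=10.0,
    headers={"Content-Type": "application/json"},
    params=toy_flatten(query_params),
)
print(response.status_code)
```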
@@ -366,20 +525,53 @@ def wait( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/operations/{operation}/wait".format( - host=self._host, project=request.project, operation=request.operation, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/operations/{operation}/wait", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ] + + request_kwargs = compute.WaitGlobalOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.WaitGlobalOperationRequest.to_json( + compute.WaitGlobalOperationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -389,5 +581,38 @@ def wait( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListGlobalOperationsRequest], compute.OperationAggregatedList + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteGlobalOperationRequest], compute.DeleteGlobalOperationResponse + ]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetGlobalOperationRequest], compute.Operation]: + return self._get + + @property + def list( + self, + ) -> Callable[[compute.ListGlobalOperationsRequest], compute.OperationList]: + return self._list + + @property + def wait(self) -> Callable[[compute.WaitGlobalOperationRequest], compute.Operation]: + return self._wait + + def close(self): + self._session.close() + __all__ = ("GlobalOperationsRestTransport",) diff --git a/google/cloud/compute_v1/services/global_organization_operations/client.py b/google/cloud/compute_v1/services/global_organization_operations/client.py index 6523acb35..e60f8be16 100644 --- a/google/cloud/compute_v1/services/global_organization_operations/client.py +++ b/google/cloud/compute_v1/services/global_organization_operations/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
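As the client-level `wait()` docstring earlier in this patch notes, the call blocks for roughly two minutes at most and can return before the operation reaches ``DONE``, so callers who need completion still have to loop. A hedged usage sketch, assuming application-default credentials and placeholder project/operation names:

```python
# Hedged usage sketch: poll GlobalOperationsClient.wait() until the operation
# reports DONE. Project and operation names are placeholders.
from google.cloud import compute_v1

client = compute_v1.GlobalOperationsClient()
operation = client.wait(project="my-project", operation="operation-12345")
while operation.status != compute_v1.Operation.Status.DONE:
    # wait() may return early, so keep asking until the server reports a
    # terminal state.
    operation = client.wait(project="my-project", operation="operation-12345")
print(operation.status)
```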
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.global_organization_operations import pagers from google.cloud.compute_v1.types import compute from .transports.base import GlobalOrganizationOperationsTransport, DEFAULT_CLIENT_INFO @@ -267,8 +271,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -330,21 +341,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteGlobalOrganizationOperationRequest = None, + request: Union[compute.DeleteGlobalOrganizationOperationRequest, dict] = None, *, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteGlobalOrganizationOperationResponse: r"""Deletes the specified Operations resource. Args: - request (google.cloud.compute_v1.types.DeleteGlobalOrganizationOperationRequest): + request (Union[google.cloud.compute_v1.types.DeleteGlobalOrganizationOperationRequest, dict]): The request object. A request message for GlobalOrganizationOperations.Delete. See the method description for details. @@ -401,10 +413,10 @@ def delete( def get( self, - request: compute.GetGlobalOrganizationOperationRequest = None, + request: Union[compute.GetGlobalOrganizationOperationRequest, dict] = None, *, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -412,7 +424,7 @@ def get( operations by making a ``list()`` request. 
Args: - request (google.cloud.compute_v1.types.GetGlobalOrganizationOperationRequest): + request (Union[google.cloud.compute_v1.types.GetGlobalOrganizationOperationRequest, dict]): The request object. A request message for GlobalOrganizationOperations.Get. See the method description for details. @@ -481,9 +493,9 @@ def get( def list( self, - request: compute.ListGlobalOrganizationOperationsRequest = None, + request: Union[compute.ListGlobalOrganizationOperationsRequest, dict] = None, *, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -491,7 +503,7 @@ def list( within the specified organization. Args: - request (google.cloud.compute_v1.types.ListGlobalOrganizationOperationsRequest): + request (Union[google.cloud.compute_v1.types.ListGlobalOrganizationOperationsRequest, dict]): The request object. A request message for GlobalOrganizationOperations.List. See the method description for details. @@ -534,6 +546,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/global_organization_operations/pagers.py b/google/cloud/compute_v1/services/global_organization_operations/pagers.py index abef18dba..cdab741fc 100644 --- a/google/cloud/compute_v1/services/global_organization_operations/pagers.py +++ b/google/cloud/compute_v1/services/global_organization_operations/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.OperationList]: + def pages(self) -> Iterator[compute.OperationList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Operation]: + def __iter__(self) -> Iterator[compute.Operation]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/global_organization_operations/transports/base.py b/google/cloud/compute_v1/services/global_organization_operations/transports/base.py index 761c77b4b..025992490 100644 --- a/google/cloud/compute_v1/services/global_organization_operations/transports/base.py +++ b/google/cloud/compute_v1/services/global_organization_operations/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import 
google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class GlobalOrganizationOperationsTransport(abc.ABC): """Abstract transport class for GlobalOrganizationOperations.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -169,6 +133,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py b/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py index 9b23fd401..344d308cb 100644 --- a/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py +++ b/google/cloud/compute_v1/services/global_organization_operations/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + GlobalOrganizationOperationsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import GlobalOrganizationOperationsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class GlobalOrganizationOperationsRestTransport(GlobalOrganizationOperationsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteGlobalOrganizationOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteGlobalOrganizationOperationResponse: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( GlobalOrganizationOperations.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -123,22 +153,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/operations/{operation}".format( - host=self._host, operation=request.operation, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/locations/global/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ] + + request_kwargs = compute.DeleteGlobalOrganizationOperationRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteGlobalOrganizationOperationRequest.to_json( + compute.DeleteGlobalOrganizationOperationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteGlobalOrganizationOperationRequest.parent_id in request: - query_params["parentId"] = request.parent_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -150,10 +214,12 @@ def delete( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetGlobalOrganizationOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the get method over HTTP. @@ -164,6 +230,9 @@ def get( GlobalOrganizationOperations.Get. See the method description for details. 
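In the rewritten `_delete` above, the hand-assembled URL is replaced by `path_template.transcode`, which matches the request against the declared HTTP rules and returns the verb, the expanded URI, and whatever is left over as query parameters. A small sketch with plain keyword arguments, assuming `transcode` behaves as it is used in this diff:

from google.api_core import path_template

http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/locations/global/operations/{operation}",
    },
]

transcoded = path_template.transcode(http_options, operation="operation-1234")
print(transcoded["method"])        # "delete"
print(transcoded["uri"])           # ".../operations/operation-1234"
print(transcoded["query_params"])  # fields not bound in the URI end up here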
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -187,22 +256,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/operations/{operation}".format( - host=self._host, operation=request.operation, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/locations/global/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ] + + request_kwargs = compute.GetGlobalOrganizationOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetGlobalOrganizationOperationRequest.to_json( + compute.GetGlobalOrganizationOperationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetGlobalOrganizationOperationRequest.parent_id in request: - query_params["parentId"] = request.parent_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -212,10 +313,12 @@ def get( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListGlobalOrganizationOperationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.OperationList: r"""Call the list method over HTTP. @@ -226,6 +329,9 @@ def list( GlobalOrganizationOperations.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -236,35 +342,39 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/locations/global/operations".format( - host=self._host, - ) + http_options = [ + {"method": "get", "uri": "/compute/v1/locations/global/operations",}, + ] - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListGlobalOrganizationOperationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListGlobalOrganizationOperationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListGlobalOrganizationOperationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListGlobalOrganizationOperationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListGlobalOrganizationOperationsRequest.parent_id in request: - query_params["parentId"] = request.parent_id - if ( - compute.ListGlobalOrganizationOperationsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + request_kwargs = compute.ListGlobalOrganizationOperationsRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListGlobalOrganizationOperationsRequest.to_json( + compute.ListGlobalOrganizationOperationsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -276,5 +386,31 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteGlobalOrganizationOperationRequest], + compute.DeleteGlobalOrganizationOperationResponse, + ]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetGlobalOrganizationOperationRequest], compute.Operation]: + return self._get + + @property + def list( + self, + ) -> Callable[ + [compute.ListGlobalOrganizationOperationsRequest], compute.OperationList + ]: + return self._list + + def close(self): + self._session.close() + __all__ = ("GlobalOrganizationOperationsRestTransport",) diff --git a/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py b/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py index 87668b1ce..b8472b006 100644 --- a/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py +++ b/google/cloud/compute_v1/services/global_public_delegated_prefixes/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
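Note that `delete`, `get`, and `list` on the REST transport are now properties returning the private `_delete`/`_get`/`_list` implementations, so callers still invoke `transport.delete(request)` unchanged. A toy illustration of the pattern; the class and field names below are placeholders, not the real transport:

from typing import Callable


class ToyTransport:
    def _delete(self, request: dict) -> str:
        # Private implementation; the generated _delete issues the HTTP call.
        return "deleted " + request["operation"]

    @property
    def delete(self) -> Callable[[dict], str]:
        # Public surface stays a callable attribute: transport.delete(req).
        return self._delete


transport = ToyTransport()
print(transport.delete({"operation": "op-1"}))  # deleted op-1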
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.global_public_delegated_prefixes import pagers from google.cloud.compute_v1.types import compute from .transports.base import GlobalPublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO @@ -267,8 +271,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -330,22 +341,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteGlobalPublicDelegatedPrefixeRequest = None, + request: Union[compute.DeleteGlobalPublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, public_delegated_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified global PublicDelegatedPrefix. Args: - request (google.cloud.compute_v1.types.DeleteGlobalPublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.DeleteGlobalPublicDelegatedPrefixeRequest, dict]): The request object. A request message for GlobalPublicDelegatedPrefixes.Delete. See the method description for details. @@ -421,11 +433,11 @@ def delete( def get( self, - request: compute.GetGlobalPublicDelegatedPrefixeRequest = None, + request: Union[compute.GetGlobalPublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, public_delegated_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicDelegatedPrefix: @@ -433,7 +445,7 @@ def get( resource. 
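As in the other clients, `distutils.util.strtobool` is dropped in favour of an explicit check of `GOOGLE_API_USE_CLIENT_CERTIFICATE`. A minimal standalone sketch of the same validation; the helper name is hypothetical:

import os


def _use_client_cert() -> bool:
    # Only the literal strings "true" and "false" are accepted now;
    # values such as "1" or "yes" raise instead of being coerced.
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
            "must be either `true` or `false`"
        )
    return value == "true"


print(_use_client_cert())  # False unless the variable is set to "true"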
Args: - request (google.cloud.compute_v1.types.GetGlobalPublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.GetGlobalPublicDelegatedPrefixeRequest, dict]): The request object. A request message for GlobalPublicDelegatedPrefixes.Get. See the method description for details. @@ -503,11 +515,11 @@ def get( def insert( self, - request: compute.InsertGlobalPublicDelegatedPrefixeRequest = None, + request: Union[compute.InsertGlobalPublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, public_delegated_prefix_resource: compute.PublicDelegatedPrefix = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -516,7 +528,7 @@ def insert( in the request. Args: - request (google.cloud.compute_v1.types.InsertGlobalPublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.InsertGlobalPublicDelegatedPrefixeRequest, dict]): The request object. A request message for GlobalPublicDelegatedPrefixes.Insert. See the method description for details. @@ -592,10 +604,10 @@ def insert( def list( self, - request: compute.ListGlobalPublicDelegatedPrefixesRequest = None, + request: Union[compute.ListGlobalPublicDelegatedPrefixesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -603,7 +615,7 @@ def list( project. Args: - request (google.cloud.compute_v1.types.ListGlobalPublicDelegatedPrefixesRequest): + request (Union[google.cloud.compute_v1.types.ListGlobalPublicDelegatedPrefixesRequest, dict]): The request object. A request message for GlobalPublicDelegatedPrefixes.List. See the method description for details. @@ -664,12 +676,12 @@ def list( def patch( self, - request: compute.PatchGlobalPublicDelegatedPrefixeRequest = None, + request: Union[compute.PatchGlobalPublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, public_delegated_prefix: str = None, public_delegated_prefix_resource: compute.PublicDelegatedPrefix = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -679,7 +691,7 @@ def patch( patch format and processing rules. Args: - request (google.cloud.compute_v1.types.PatchGlobalPublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.PatchGlobalPublicDelegatedPrefixeRequest, dict]): The request object. A request message for GlobalPublicDelegatedPrefixes.Patch. See the method description for details. @@ -764,6 +776,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py b/google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py index 37ed65a54..5b0cbccc6 100644 --- a/google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py +++ b/google/cloud/compute_v1/services/global_public_delegated_prefixes/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.PublicDelegatedPrefixList]: + def pages(self) -> Iterator[compute.PublicDelegatedPrefixList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.PublicDelegatedPrefix]: + def __iter__(self) -> Iterator[compute.PublicDelegatedPrefix]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/base.py b/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/base.py index 254740127..48c4aed73 100644 --- a/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/base.py +++ b/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class GlobalPublicDelegatedPrefixesTransport(abc.ABC): """Abstract transport class for GlobalPublicDelegatedPrefixes.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. 
self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py b/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py index bad5300ab..eadcfed12 100644 --- a/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py +++ b/google/cloud/compute_v1/services/global_public_delegated_prefixes/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
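With `always_use_jwt_access=True` now passed by the client, service-account credentials are asked to self-sign a JWT instead of exchanging it for an access token, provided the installed google-auth supports it. A sketch of that guard; the function name is hypothetical:

from google.auth import credentials as ga_credentials
from google.oauth2 import service_account


def maybe_enable_self_signed_jwt(
    credentials: ga_credentials.Credentials,
) -> ga_credentials.Credentials:
    # Only service-account credentials on a new-enough google-auth release
    # expose with_always_use_jwt_access.
    if isinstance(credentials, service_account.Credentials) and hasattr(
        credentials, "with_always_use_jwt_access"
    ):
        return credentials.with_always_use_jwt_access(True)
    return credentials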
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + GlobalPublicDelegatedPrefixesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import GlobalPublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class GlobalPublicDelegatedPrefixesRestTransport( @@ -55,6 +74,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -82,6 +102,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -100,10 +125,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteGlobalPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -114,6 +141,9 @@ def delete( GlobalPublicDelegatedPrefixes.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -137,24 +167,57 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}".format( - host=self._host, - project=request.project, - public_delegated_prefix=request.public_delegated_prefix, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_delegated_prefix", "publicDelegatedPrefix"), + ] + + request_kwargs = compute.DeleteGlobalPublicDelegatedPrefixeRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteGlobalPublicDelegatedPrefixeRequest.to_json( + compute.DeleteGlobalPublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteGlobalPublicDelegatedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -164,10 +227,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetGlobalPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicDelegatedPrefix: r"""Call the get method over HTTP. @@ -178,6 +243,9 @@ def get( GlobalPublicDelegatedPrefixes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
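The `required_fields` loop above exists because serializing the transcoded query params with `including_default_value_fields=False` silently drops a required field whose value equals the proto default. A self-contained sketch of the back-fill with placeholder values:

# (snake_case_name, camel_case_name) pairs, as declared in the transport.
required_fields = [
    ("project", "project"),
    ("public_delegated_prefix", "publicDelegatedPrefix"),
]

# What to_json produced (the default-valued field was dropped) ...
query_params = {"project": "my-project"}
# ... versus what transcoding originally extracted.
orig_query_params = {"project": "my-project", "public_delegated_prefix": ""}

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

print(query_params)  # {'project': 'my-project', 'publicDelegatedPrefix': ''}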
@@ -195,22 +263,55 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}".format( - host=self._host, - project=request.project, - public_delegated_prefix=request.public_delegated_prefix, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_delegated_prefix", "publicDelegatedPrefix"), + ] + + request_kwargs = compute.GetGlobalPublicDelegatedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetGlobalPublicDelegatedPrefixeRequest.to_json( + compute.GetGlobalPublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +323,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertGlobalPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -236,6 +339,9 @@ def insert( GlobalPublicDelegatedPrefixes.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -259,30 +365,63 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/publicDelegatedPrefixes", + "body": "public_delegated_prefix_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertGlobalPublicDelegatedPrefixeRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PublicDelegatedPrefix.to_json( - request.public_delegated_prefix_resource, + compute.PublicDelegatedPrefix(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicDelegatedPrefixes".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertGlobalPublicDelegatedPrefixeRequest.to_json( + compute.InsertGlobalPublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertGlobalPublicDelegatedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -293,10 +432,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListGlobalPublicDelegatedPrefixesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicDelegatedPrefixList: r"""Call the list method over HTTP. @@ -307,6 +448,9 @@ def list( GlobalPublicDelegatedPrefixes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -315,33 +459,56 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicDelegatedPrefixes".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/publicDelegatedPrefixes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListGlobalPublicDelegatedPrefixesRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListGlobalPublicDelegatedPrefixesRequest.to_json( + compute.ListGlobalPublicDelegatedPrefixesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListGlobalPublicDelegatedPrefixesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListGlobalPublicDelegatedPrefixesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListGlobalPublicDelegatedPrefixesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListGlobalPublicDelegatedPrefixesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListGlobalPublicDelegatedPrefixesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -353,10 +520,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchGlobalPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -367,6 +536,9 @@ def patch( GlobalPublicDelegatedPrefixes.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
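Query parameters are no longer copied field by field; the whole request is serialized and the result flattened with `rest_helpers.flatten_query_params` before `getattr(self._session, method)` sends it. A small sketch of the flattening step with placeholder values:

from google.api_core import rest_helpers

query_params = {"filter": "name = example-*", "maxResults": 10}
flat = rest_helpers.flatten_query_params(query_params)
print(flat)  # e.g. [('filter', 'name = example-*'), ('maxResults', 10)]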
@@ -390,32 +562,64 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}", + "body": "public_delegated_prefix_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_delegated_prefix", "publicDelegatedPrefix"), + ] + + request_kwargs = compute.PatchGlobalPublicDelegatedPrefixeRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PublicDelegatedPrefix.to_json( - request.public_delegated_prefix_resource, + compute.PublicDelegatedPrefix(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}".format( - host=self._host, - project=request.project, - public_delegated_prefix=request.public_delegated_prefix, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchGlobalPublicDelegatedPrefixeRequest.to_json( + compute.PatchGlobalPublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchGlobalPublicDelegatedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -426,5 +630,49 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteGlobalPublicDelegatedPrefixeRequest], compute.Operation + ]: + return self._delete + + @property + def get( + self, + ) -> Callable[ + [compute.GetGlobalPublicDelegatedPrefixeRequest], compute.PublicDelegatedPrefix + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertGlobalPublicDelegatedPrefixeRequest], compute.Operation + ]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListGlobalPublicDelegatedPrefixesRequest], + compute.PublicDelegatedPrefixList, + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[ + [compute.PatchGlobalPublicDelegatedPrefixeRequest], compute.Operation + ]: + return self._patch + + def close(self): + self._session.close() + __all__ = ("GlobalPublicDelegatedPrefixesRestTransport",) diff --git a/google/cloud/compute_v1/services/health_checks/client.py b/google/cloud/compute_v1/services/health_checks/client.py index e55915857..2eed09e53 100644 --- a/google/cloud/compute_v1/services/health_checks/client.py +++ b/google/cloud/compute_v1/services/health_checks/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.health_checks import pagers from google.cloud.compute_v1.types import compute from .transports.base import HealthChecksTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,14 +335,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListHealthChecksRequest = None, + request: Union[compute.AggregatedListHealthChecksRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -339,7 +351,7 @@ def aggregated_list( regional and global, available to the specified project. Args: - request (google.cloud.compute_v1.types.AggregatedListHealthChecksRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListHealthChecksRequest, dict]): The request object. A request message for HealthChecks.AggregatedList. See the method description for details. @@ -402,18 +414,18 @@ def aggregated_list( def delete( self, - request: compute.DeleteHealthCheckRequest = None, + request: Union[compute.DeleteHealthCheckRequest, dict] = None, *, project: str = None, health_check: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified HealthCheck resource. 
Args: - request (google.cloud.compute_v1.types.DeleteHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.DeleteHealthCheckRequest, dict]): The request object. A request message for HealthChecks.Delete. See the method description for details. @@ -489,11 +501,11 @@ def delete( def get( self, - request: compute.GetHealthCheckRequest = None, + request: Union[compute.GetHealthCheckRequest, dict] = None, *, project: str = None, health_check: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheck: @@ -502,7 +514,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.GetHealthCheckRequest, dict]): The request object. A request message for HealthChecks.Get. See the method description for details. @@ -582,11 +594,11 @@ def get( def insert( self, - request: compute.InsertHealthCheckRequest = None, + request: Union[compute.InsertHealthCheckRequest, dict] = None, *, project: str = None, health_check_resource: compute.HealthCheck = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -594,7 +606,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.InsertHealthCheckRequest, dict]): The request object. A request message for HealthChecks.Insert. See the method description for details. @@ -668,10 +680,10 @@ def insert( def list( self, - request: compute.ListHealthChecksRequest = None, + request: Union[compute.ListHealthChecksRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -679,7 +691,7 @@ def list( to the specified project. Args: - request (google.cloud.compute_v1.types.ListHealthChecksRequest): + request (Union[google.cloud.compute_v1.types.ListHealthChecksRequest, dict]): The request object. A request message for HealthChecks.List. See the method description for details. @@ -742,12 +754,12 @@ def list( def patch( self, - request: compute.PatchHealthCheckRequest = None, + request: Union[compute.PatchHealthCheckRequest, dict] = None, *, project: str = None, health_check: str = None, health_check_resource: compute.HealthCheck = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -757,7 +769,7 @@ def patch( patch format and processing rules. Args: - request (google.cloud.compute_v1.types.PatchHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.PatchHealthCheckRequest, dict]): The request object. A request message for HealthChecks.Patch. See the method description for details. 
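`list()` above still returns a pager, and the pager annotations now use `Iterator`; iterating the pager walks `next_page_token` pages transparently. A usage sketch, assuming application default credentials; the project id is a placeholder:

from google.cloud import compute_v1

client = compute_v1.HealthChecksClient()
for health_check in client.list(project="my-project"):  # placeholder project
    # __iter__ yields compute.HealthCheck items, pulling further pages via
    # the pager's `pages` generator as needed.
    print(health_check.name)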
@@ -840,12 +852,12 @@ def patch( def update( self, - request: compute.UpdateHealthCheckRequest = None, + request: Union[compute.UpdateHealthCheckRequest, dict] = None, *, project: str = None, health_check: str = None, health_check_resource: compute.HealthCheck = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -853,7 +865,7 @@ def update( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.UpdateHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.UpdateHealthCheckRequest, dict]): The request object. A request message for HealthChecks.Update. See the method description for details. @@ -934,6 +946,19 @@ def update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/health_checks/pagers.py b/google/cloud/compute_v1/services/health_checks/pagers.py index 28f28c8e7..08a90d77c 100644 --- a/google/cloud/compute_v1/services/health_checks/pagers.py +++ b/google/cloud/compute_v1/services/health_checks/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.HealthChecksAggregatedList]: + def pages(self) -> Iterator[compute.HealthChecksAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.HealthChecksScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.HealthChecksScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.HealthCheckList]: + def pages(self) -> Iterator[compute.HealthCheckList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.HealthCheck]: + def __iter__(self) -> Iterator[compute.HealthCheck]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/health_checks/transports/base.py b/google/cloud/compute_v1/services/health_checks/transports/base.py index d6806e2b1..20fd583cd 100644 --- a/google/cloud/compute_v1/services/health_checks/transports/base.py +++ b/google/cloud/compute_v1/services/health_checks/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as 
requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class HealthChecksTransport(abc.ABC): """Abstract transport class for HealthChecks.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -181,6 +145,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/health_checks/transports/rest.py b/google/cloud/compute_v1/services/health_checks/transports/rest.py index 9e2645a4d..ebbaa3f84 100644 --- a/google/cloud/compute_v1/services/health_checks/transports/rest.py +++ b/google/cloud/compute_v1/services/health_checks/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import HealthChecksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import HealthChecksTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class HealthChecksRestTransport(HealthChecksTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListHealthChecksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthChecksAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( HealthChecks.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/healthChecks".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/healthChecks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListHealthChecksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListHealthChecksRequest.to_json( + compute.AggregatedListHealthChecksRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListHealthChecksRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListHealthChecksRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListHealthChecksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListHealthChecksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListHealthChecksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListHealthChecksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
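The rewritten helper above replaces hand-built URLs with path_template.transcode, which splits a request dict into the HTTP verb, the expanded URI, and the leftover query parameters. A small sketch of that call under hypothetical values; the generated code derives the kwargs from Request.to_dict(request):

    from google.api_core import path_template

    http_options = [
        {"method": "get", "uri": "/compute/v1/projects/{project}/aggregated/healthChecks"},
    ]

    transcoded = path_template.transcode(
        http_options, project="my-project", filter="name eq primary-.*"
    )

    print(transcoded["method"])        # "get"
    print(transcoded["uri"])           # "/compute/v1/projects/my-project/aggregated/healthChecks"
    print(transcoded["query_params"])  # fields not bound in the URI template, e.g. the filter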
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -171,6 +222,9 @@ def delete( HealthChecks.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -194,22 +248,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/healthChecks/{health_check}".format( - host=self._host, project=request.project, health_check=request.health_check, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/healthChecks/{health_check}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteHealthCheckRequest.to_json( + compute.DeleteHealthCheckRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -219,10 +304,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheck: r"""Call the get method over HTTP. @@ -233,6 +320,9 @@ def get( HealthChecks.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -261,20 +351,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/healthChecks/{health_check}".format( - host=self._host, project=request.project, health_check=request.health_check, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/healthChecks/{health_check}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ] + + request_kwargs = compute.GetHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetHealthCheckRequest.to_json( + compute.GetHealthCheckRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
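The backfill loop above guards against required fields whose proto default (for example an empty string) is dropped by the to_json call, re-adding them to the query parameters under their camelCase names. A standalone illustration with hypothetical values:

    required_fields = [
        ("health_check", "healthCheck"),
        ("project", "project"),
    ]
    orig_query_params = {"project": "my-project", "health_check": ""}
    query_params = {"project": "my-project"}  # "healthCheck" was lost during to_json

    for snake_case_name, camel_case_name in required_fields:
        if snake_case_name in orig_query_params and camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

    print(query_params)  # {'project': 'my-project', 'healthCheck': ''}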
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -286,10 +409,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -300,6 +425,9 @@ def insert( HealthChecks.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -323,30 +451,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/healthChecks", + "body": "health_check_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheck.to_json( - request.health_check_resource, + compute.HealthCheck(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/healthChecks".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertHealthCheckRequest.to_json( + compute.InsertHealthCheckRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -357,10 +514,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListHealthChecksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheckList: r"""Call the list method over HTTP. @@ -371,6 +530,9 @@ def list( HealthChecks.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -381,30 +543,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/healthChecks".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/healthChecks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListHealthChecksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListHealthChecksRequest.to_json( + compute.ListHealthChecksRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListHealthChecksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListHealthChecksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListHealthChecksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListHealthChecksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListHealthChecksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -416,10 +600,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -430,6 +616,9 @@ def patch( HealthChecks.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -453,30 +642,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/healthChecks/{health_check}", + "body": "health_check_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ] + + request_kwargs = compute.PatchHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheck.to_json( - request.health_check_resource, + compute.HealthCheck(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/healthChecks/{health_check}".format( - host=self._host, project=request.project, health_check=request.health_check, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchHealthCheckRequest.to_json( + compute.PatchHealthCheckRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
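Once the query parameters are assembled, they are flattened with rest_helpers.flatten_query_params and the HTTP verb is dispatched dynamically on the session. A brief sketch; the dispatch itself is shown as a comment because it needs the transport's authorized session:

    from google.api_core import rest_helpers

    # flatten_query_params turns the JSON-ified dict into (key, value) pairs
    # that `requests` can encode; nested or repeated fields become repeated keys.
    query_params = {"maxResults": 10, "returnPartialSuccess": True}
    params = rest_helpers.flatten_query_params(query_params)
    print(params)  # a list of (key, value) pairs

    # Inside the transport this is then sent as, e.g.:
    #   response = getattr(self._session, method)(
    #       "https://{host}{uri}".format(host=self._host, uri=uri),
    #       timeout=timeout, headers=headers, params=params,
    #   )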
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -487,10 +706,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -501,6 +722,9 @@ def update( HealthChecks.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -524,30 +748,60 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/global/healthChecks/{health_check}", + "body": "health_check_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ] + + request_kwargs = compute.UpdateHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheck.to_json( - request.health_check_resource, + compute.HealthCheck(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/healthChecks/{health_check}".format( - host=self._host, project=request.project, health_check=request.health_check, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateHealthCheckRequest.to_json( + compute.UpdateHealthCheckRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -558,5 +812,42 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListHealthChecksRequest], compute.HealthChecksAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteHealthCheckRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetHealthCheckRequest], compute.HealthCheck]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertHealthCheckRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListHealthChecksRequest], compute.HealthCheckList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchHealthCheckRequest], compute.Operation]: + return self._patch + + @property + def update(self) -> Callable[[compute.UpdateHealthCheckRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("HealthChecksRestTransport",) diff --git a/google/cloud/compute_v1/services/image_family_views/client.py b/google/cloud/compute_v1/services/image_family_views/client.py index ff13f17ae..776b537bc 100644 --- a/google/cloud/compute_v1/services/image_family_views/client.py +++ b/google/cloud/compute_v1/services/image_family_views/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.types import compute from .transports.base import ImageFamilyViewsTransport, DEFAULT_CLIENT_INFO from .transports.rest import ImageFamilyViewsRestTransport @@ -262,8 +266,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -325,16 +336,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def get( self, - request: compute.GetImageFamilyViewRequest = None, + request: Union[compute.GetImageFamilyViewRequest, dict] = None, *, project: str = None, zone: str = None, family: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ImageFamilyView: @@ -343,7 +355,7 @@ def get( specified zone. Args: - request (google.cloud.compute_v1.types.GetImageFamilyViewRequest): + request (Union[google.cloud.compute_v1.types.GetImageFamilyViewRequest, dict]): The request object. A request message for ImageFamilyViews.Get. See the method description for details. @@ -411,6 +423,19 @@ def get( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/image_family_views/transports/base.py b/google/cloud/compute_v1/services/image_family_views/transports/base.py index a26aabde6..158aba8b7 100644 --- a/google/cloud/compute_v1/services/image_family_views/transports/base.py +++ b/google/cloud/compute_v1/services/image_family_views/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ImageFamilyViewsTransport(abc.ABC): """Abstract transport class for ImageFamilyViews.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -164,6 +128,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def get( self, diff --git a/google/cloud/compute_v1/services/image_family_views/transports/rest.py b/google/cloud/compute_v1/services/image_family_views/transports/rest.py index 23c983cbb..90c26f8e3 100644 --- a/google/cloud/compute_v1/services/image_family_views/transports/rest.py +++ b/google/cloud/compute_v1/services/image_family_views/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
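With the google-auth version shim removed, the base transports now assume google-auth >= 1.25.0 and always pass both scopes and default_scopes when obtaining credentials. A sketch of the equivalent call; the scope URL is illustrative rather than the exact AUTH_SCOPES value:

    import google.auth

    scopes_kwargs = {
        "scopes": None,
        "default_scopes": ("https://www.googleapis.com/auth/cloud-platform",),
    }
    credentials, project_id = google.auth.default(**scopes_kwargs, quota_project_id=None)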
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + ImageFamilyViewsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import ImageFamilyViewsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ImageFamilyViewsRestTransport(ImageFamilyViewsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def get( + def _get( self, request: compute.GetImageFamilyViewRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ImageFamilyView: r"""Call the get method over HTTP. @@ -112,6 +139,9 @@ def get( ImageFamilyViews.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -120,23 +150,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/imageFamilyViews/{family}".format( - host=self._host, - project=request.project, - zone=request.zone, - family=request.family, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/imageFamilyViews/{family}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("family", "family"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetImageFamilyViewRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetImageFamilyViewRequest.to_json( + compute.GetImageFamilyViewRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -148,5 +209,14 @@ def get( response.content, ignore_unknown_fields=True ) + @property + def get( + self, + ) -> Callable[[compute.GetImageFamilyViewRequest], compute.ImageFamilyView]: + return self._get + + def close(self): + self._session.close() + __all__ = ("ImageFamilyViewsRestTransport",) diff --git a/google/cloud/compute_v1/services/images/client.py b/google/cloud/compute_v1/services/images/client.py index 1ace37730..cd0337749 100644 --- a/google/cloud/compute_v1/services/images/client.py +++ b/google/cloud/compute_v1/services/images/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
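The REST transports now accept a url_scheme argument intended for local or test endpoints, although the request helpers above still format "https://{host}{uri}" until the scheme TODO is resolved. A hypothetical construction using anonymous credentials:

    from google.auth import credentials as ga_credentials
    from google.cloud.compute_v1.services.image_family_views.transports.rest import (
        ImageFamilyViewsRestTransport,
    )

    # Hypothetical local endpoint; the parameter is accepted but not yet used
    # when the request URL is built.
    transport = ImageFamilyViewsRestTransport(
        host="localhost:8000",
        credentials=ga_credentials.AnonymousCredentials(),
        url_scheme="http",
    )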
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.images import pagers from google.cloud.compute_v1.types import compute from .transports.base import ImagesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,22 +335,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteImageRequest = None, + request: Union[compute.DeleteImageRequest, dict] = None, *, project: str = None, image: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified image. Args: - request (google.cloud.compute_v1.types.DeleteImageRequest): + request (Union[google.cloud.compute_v1.types.DeleteImageRequest, dict]): The request object. A request message for Images.Delete. See the method description for details. project (str): @@ -412,12 +424,12 @@ def delete( def deprecate( self, - request: compute.DeprecateImageRequest = None, + request: Union[compute.DeprecateImageRequest, dict] = None, *, project: str = None, image: str = None, deprecation_status_resource: compute.DeprecationStatus = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -426,7 +438,7 @@ def deprecate( instead. Args: - request (google.cloud.compute_v1.types.DeprecateImageRequest): + request (Union[google.cloud.compute_v1.types.DeprecateImageRequest, dict]): The request object. A request message for Images.Deprecate. 
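The stricter handling of GOOGLE_API_USE_CLIENT_CERTIFICATE replaces distutils.util.strtobool and rejects anything other than the literal strings "true" and "false". The added logic, shown standalone:

    import os

    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
        )
    use_client_cert = value == "true"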
See the method description for details. @@ -507,11 +519,11 @@ def deprecate( def get( self, - request: compute.GetImageRequest = None, + request: Union[compute.GetImageRequest, dict] = None, *, project: str = None, image: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Image: @@ -519,7 +531,7 @@ def get( images by making a list() request. Args: - request (google.cloud.compute_v1.types.GetImageRequest): + request (Union[google.cloud.compute_v1.types.GetImageRequest, dict]): The request object. A request message for Images.Get. See the method description for details. project (str): @@ -581,11 +593,11 @@ def get( def get_from_family( self, - request: compute.GetFromFamilyImageRequest = None, + request: Union[compute.GetFromFamilyImageRequest, dict] = None, *, project: str = None, family: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Image: @@ -593,7 +605,7 @@ def get_from_family( family and is not deprecated. Args: - request (google.cloud.compute_v1.types.GetFromFamilyImageRequest): + request (Union[google.cloud.compute_v1.types.GetFromFamilyImageRequest, dict]): The request object. A request message for Images.GetFromFamily. See the method description for details. @@ -658,11 +670,11 @@ def get_from_family( def get_iam_policy( self, - request: compute.GetIamPolicyImageRequest = None, + request: Union[compute.GetIamPolicyImageRequest, dict] = None, *, project: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -670,7 +682,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyImageRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyImageRequest, dict]): The request object. A request message for Images.GetIamPolicy. See the method description for details. @@ -770,11 +782,11 @@ def get_iam_policy( def insert( self, - request: compute.InsertImageRequest = None, + request: Union[compute.InsertImageRequest, dict] = None, *, project: str = None, image_resource: compute.Image = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -782,7 +794,7 @@ def insert( data included in the request. Args: - request (google.cloud.compute_v1.types.InsertImageRequest): + request (Union[google.cloud.compute_v1.types.InsertImageRequest, dict]): The request object. A request message for Images.Insert. See the method description for details. project (str): @@ -855,10 +867,10 @@ def insert( def list( self, - request: compute.ListImagesRequest = None, + request: Union[compute.ListImagesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -872,7 +884,7 @@ def list( as debian-cloud or windows-cloud. Args: - request (google.cloud.compute_v1.types.ListImagesRequest): + request (Union[google.cloud.compute_v1.types.ListImagesRequest, dict]): The request object. 
A request message for Images.List. See the method description for details. project (str): @@ -933,12 +945,12 @@ def list( def patch( self, - request: compute.PatchImageRequest = None, + request: Union[compute.PatchImageRequest, dict] = None, *, project: str = None, image: str = None, image_resource: compute.Image = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -947,7 +959,7 @@ def patch( family, description, deprecation status. Args: - request (google.cloud.compute_v1.types.PatchImageRequest): + request (Union[google.cloud.compute_v1.types.PatchImageRequest, dict]): The request object. A request message for Images.Patch. See the method description for details. project (str): @@ -1027,12 +1039,12 @@ def patch( def set_iam_policy( self, - request: compute.SetIamPolicyImageRequest = None, + request: Union[compute.SetIamPolicyImageRequest, dict] = None, *, project: str = None, resource: str = None, global_set_policy_request_resource: compute.GlobalSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1040,7 +1052,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyImageRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyImageRequest, dict]): The request object. A request message for Images.SetIamPolicy. See the method description for details. @@ -1151,12 +1163,12 @@ def set_iam_policy( def set_labels( self, - request: compute.SetLabelsImageRequest = None, + request: Union[compute.SetLabelsImageRequest, dict] = None, *, project: str = None, resource: str = None, global_set_labels_request_resource: compute.GlobalSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1164,7 +1176,7 @@ def set_labels( labels, read the Labeling Resources documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsImageRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsImageRequest, dict]): The request object. A request message for Images.SetLabels. See the method description for details. @@ -1251,12 +1263,12 @@ def set_labels( def test_iam_permissions( self, - request: compute.TestIamPermissionsImageRequest = None, + request: Union[compute.TestIamPermissionsImageRequest, dict] = None, *, project: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1264,7 +1276,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsImageRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsImageRequest, dict]): The request object. A request message for Images.TestIamPermissions. See the method description for details. @@ -1334,6 +1346,19 @@ def test_iam_permissions( # Done; return the response. 
return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/images/pagers.py b/google/cloud/compute_v1/services/images/pagers.py index 2bcf2aea7..67d8a8f62 100644 --- a/google/cloud/compute_v1/services/images/pagers.py +++ b/google/cloud/compute_v1/services/images/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ImageList]: + def pages(self) -> Iterator[compute.ImageList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Image]: + def __iter__(self) -> Iterator[compute.Image]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/images/transports/base.py b/google/cloud/compute_v1/services/images/transports/base.py index eeb769956..5f459233e 100644 --- a/google/cloud/compute_v1/services/images/transports/base.py +++ b/google/cloud/compute_v1/services/images/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ImagesTransport(abc.ABC): """Abstract transport class for Images.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. 
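The pager annotations above are corrected from Iterable to Iterator, matching the generator-based pages property. A usage sketch with a placeholder project; a pager is consumed once, so iterate either items or pages, not both:

    from google.cloud import compute_v1

    client = compute_v1.ImagesClient()

    # Item-by-item: each element is a compute.Image.
    for image in client.list(project="my-project"):
        print(image.name)

    # Page-by-page: each page is a compute.ImageList with an `items` field.
    for page in client.list(project="my-project").pages:
        print(len(page.items))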
self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -195,6 +159,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/images/transports/rest.py b/google/cloud/compute_v1/services/images/transports/rest.py index aaa1811cd..6c505521a 100644 --- a/google/cloud/compute_v1/services/images/transports/rest.py +++ b/google/cloud/compute_v1/services/images/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ImagesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import ImagesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ImagesRestTransport(ImagesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -111,6 +135,9 @@ def delete( The request object. A request message for Images.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
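For context, a hypothetical caller-side sketch of the surface changes in this diff: the client methods shown earlier now accept plain dict requests, take per-call retry/timeout overrides, and the client itself gains context-manager support. The `ImagesClient` export name and the field values below are assumptions, not taken from this diff.

from google.api_core import retry as retries
from google.cloud import compute_v1

# Assumed export name; this diff only shows the generated module paths.
with compute_v1.ImagesClient() as client:
    pager = client.list(
        # A plain dict is now accepted wherever a request message was required before.
        request={"project": "my-project", "max_results": 50},
        retry=retries.Retry(deadline=60.0),
        timeout=30.0,
    )
    for image in pager:
        print(image.name)
# Leaving the `with` block closes the underlying transport, so only do this
# when the transport is not shared with other clients (see the warning added
# earlier in this diff).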
@@ -134,22 +161,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{image}".format( - host=self._host, project=request.project, image=request.image, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/images/{image}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("image", "image"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteImageRequest.to_json( + compute.DeleteImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteImageRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -159,10 +217,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def deprecate( + def _deprecate( self, request: compute.DeprecateImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the deprecate method over HTTP. @@ -173,6 +233,9 @@ def deprecate( Images.Deprecate. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
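A rough sketch of what the `path_template.transcode` step above produces for the delete call; the values are illustrative and the output shape is paraphrased from how the surrounding code reads `uri`, `method`, and `query_params`.

from google.api_core import path_template

http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/projects/{project}/global/images/{image}",
    },
]
request_kwargs = {"project": "my-project", "image": "my-image", "request_id": "abc-123"}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       == "delete"
# transcoded["uri"]          == "/compute/v1/projects/my-project/global/images/my-image"
# transcoded["query_params"] == {"request_id": "abc-123"}   # fields not bound into the URI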
@@ -196,30 +259,60 @@ def deprecate( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/images/{image}/deprecate", + "body": "deprecation_status_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("image", "image"), + ("project", "project"), + ] + + request_kwargs = compute.DeprecateImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.DeprecationStatus.to_json( - request.deprecation_status_resource, + compute.DeprecationStatus(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{image}/deprecate".format( - host=self._host, project=request.project, image=request.image, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeprecateImageRequest.to_json( + compute.DeprecateImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeprecateImageRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -230,10 +323,12 @@ def deprecate( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Image: r"""Call the get method over HTTP. @@ -243,6 +338,9 @@ def get( The request object. A request message for Images.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
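The required-fields loop above exists because proto-plus `to_json(..., including_default_value_fields=False)` drops fields that still hold their default value, so a required field could vanish from the query params. A standalone illustration of the effect, with made-up values:

# Values before/after the to_json round trip (illustrative).
orig_query_params = {"project": "my-project", "image": ""}  # "" is the proto default
query_params = {"project": "my-project"}                    # "image" was dropped by to_json

required_fields = [("image", "image"), ("project", "project")]
for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"project": "my-project", "image": ""}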
@@ -255,20 +353,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{image}".format( - host=self._host, project=request.project, image=request.image, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/images/{image}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("image", "image"), + ("project", "project"), + ] + + request_kwargs = compute.GetImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetImageRequest.to_json( + compute.GetImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -278,10 +409,12 @@ def get( # Return the response return compute.Image.from_json(response.content, ignore_unknown_fields=True) - def get_from_family( + def _get_from_family( self, request: compute.GetFromFamilyImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Image: r"""Call the get from family method over HTTP. @@ -292,6 +425,9 @@ def get_from_family( Images.GetFromFamily. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -304,20 +440,53 @@ def get_from_family( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/family/{family}".format( - host=self._host, project=request.project, family=request.family, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/images/family/{family}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("family", "family"), + ("project", "project"), + ] + + request_kwargs = compute.GetFromFamilyImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetFromFamilyImageRequest.to_json( + compute.GetFromFamilyImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -327,10 +496,12 @@ def get_from_family( # Return the response return compute.Image.from_json(response.content, ignore_unknown_fields=True) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -341,6 +512,9 @@ def get_iam_policy( Images.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
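The `rest_helpers.flatten_query_params` helper used above turns the query-param dict into the key/value pairs that the `requests` session expects; a small sketch of its behaviour, with the output shown only approximately:

from google.api_core import rest_helpers

params = {"requestId": "abc-123", "returnPartialSuccess": True}
flat = rest_helpers.flatten_query_params(params)
# flat is roughly [("requestId", "abc-123"), ("returnPartialSuccess", True)];
# nested dicts are flattened to dotted keys and list values repeat the key.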
@@ -389,24 +563,53 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{resource}/getIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/images/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyImageRequest.to_json( + compute.GetIamPolicyImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetIamPolicyImageRequest.options_requested_policy_version in request: - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -416,10 +619,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -429,6 +634,9 @@ def insert( The request object. A request message for Images.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
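The query-param handling above replaces the hand-written `optionsRequestedPolicyVersion` mapping: the transcoded params are loaded back into the request message and dumped with proto-plus `to_json`, which emits lowerCamelCase JSON field names. A minimal sketch, with illustrative values:

import json

from google.cloud.compute_v1.types import compute

transcoded_query_params = {"options_requested_policy_version": 3}
query_params = json.loads(
    compute.GetIamPolicyImageRequest.to_json(
        compute.GetIamPolicyImageRequest(transcoded_query_params),
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
)
# query_params == {"optionsRequestedPolicyVersion": 3}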
@@ -452,32 +660,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/images", + "body": "image_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Image.to_json( - request.image_resource, + compute.Image(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertImageRequest.to_json( + compute.InsertImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertImageRequest.force_create in request: - query_params["forceCreate"] = request.force_create - if compute.InsertImageRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -488,10 +723,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListImagesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ImageList: r"""Call the list method over HTTP. @@ -501,6 +738,9 @@ def list( The request object. A request message for Images.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -509,30 +749,49 @@ def list( Contains a list of images. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/global/images",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListImagesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListImagesRequest.to_json( + compute.ListImagesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListImagesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListImagesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListImagesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListImagesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListImagesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -542,10 +801,12 @@ def list( # Return the response return compute.ImageList.from_json(response.content, ignore_unknown_fields=True) - def patch( + def _patch( self, request: compute.PatchImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -555,6 +816,9 @@ def patch( The request object. A request message for Images.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -578,30 +842,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/images/{image}", + "body": "image_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("image", "image"), + ("project", "project"), + ] + + request_kwargs = compute.PatchImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Image.to_json( - request.image_resource, + compute.Image(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{image}".format( - host=self._host, project=request.project, image=request.image, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchImageRequest.to_json( + compute.PatchImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchImageRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -612,10 +906,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -626,6 +922,9 @@ def set_iam_policy( Images.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
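The patch body above is built by wrapping the transcoded `body` sub-dict back into its resource message and serializing it with proto-plus `to_json`; the same pattern is used by every body-bearing method in this transport. A small standalone sketch with made-up field values:

from google.cloud.compute_v1.types import compute

transcoded_body = {"description": "updated description", "family": "debian-11"}
body = compute.Image.to_json(
    compute.Image(transcoded_body),
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
# body is a JSON string, passed as `data=` to the HTTP session above.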
@@ -674,28 +973,60 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/images/{resource}/setIamPolicy", + "body": "global_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetPolicyRequest.to_json( - request.global_set_policy_request_resource, + compute.GlobalSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{resource}/setIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyImageRequest.to_json( + compute.SetIamPolicyImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -706,10 +1037,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def set_labels( + def _set_labels( self, request: compute.SetLabelsImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -720,6 +1053,9 @@ def set_labels( Images.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
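A hypothetical caller-side sketch for the set_iam_policy surface shown in the client portion of this diff; the `ImagesClient` export name and the role/member values are assumptions, not taken from this diff.

from google.cloud import compute_v1
from google.cloud.compute_v1.types import compute

client = compute_v1.ImagesClient()  # assumed export name

policy = client.set_iam_policy(
    project="my-project",
    resource="my-image",
    global_set_policy_request_resource=compute.GlobalSetPolicyRequest(
        policy=compute.Policy(
            bindings=[
                compute.Binding(
                    role="roles/compute.imageUser",
                    members=["user:alice@example.com"],
                )
            ]
        )
    ),
)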
@@ -743,28 +1079,60 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/images/{resource}/setLabels", + "body": "global_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetLabelsImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetLabelsRequest.to_json( - request.global_set_labels_request_resource, + compute.GlobalSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{resource}/setLabels".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsImageRequest.to_json( + compute.SetLabelsImageRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -775,10 +1143,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsImageRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -789,6 +1159,9 @@ def test_iam_permissions( Images.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -797,28 +1170,62 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/images/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsImageRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/images/{resource}/testIamPermissions".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsImageRequest.to_json( + compute.TestIamPermissionsImageRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -831,5 +1238,64 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def delete(self) -> Callable[[compute.DeleteImageRequest], compute.Operation]: + return self._delete + + @property + def deprecate(self) -> Callable[[compute.DeprecateImageRequest], compute.Operation]: + return self._deprecate + + @property + def get(self) -> Callable[[compute.GetImageRequest], compute.Image]: + return self._get + + @property + def get_from_family( + self, + ) -> Callable[[compute.GetFromFamilyImageRequest], compute.Image]: + return self._get_from_family + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyImageRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert(self) -> Callable[[compute.InsertImageRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListImagesRequest], compute.ImageList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchImageRequest], 
compute.Operation]: + return self._patch + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyImageRequest], compute.Policy]: + return self._set_iam_policy + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsImageRequest], compute.Operation]: + return self._set_labels + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsImageRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("ImagesRestTransport",) diff --git a/google/cloud/compute_v1/services/instance_group_managers/client.py b/google/cloud/compute_v1/services/instance_group_managers/client.py index e54de72c2..27df5583f 100644 --- a/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.instance_group_managers import pagers from google.cloud.compute_v1.types import compute from .transports.base import InstanceGroupManagersTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,17 +339,20 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def abandon_instances( self, - request: compute.AbandonInstancesInstanceGroupManagerRequest = None, + request: Union[ + compute.AbandonInstancesInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_abandon_instances_request_resource: compute.InstanceGroupManagersAbandonInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -360,7 +374,7 @@ def abandon_instances( with this method per request. Args: - request (google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.AbandonInstancesInstanceGroupManagerRequest, dict]): The request object. Messages A request message for InstanceGroupManagers.AbandonInstances. See the method @@ -462,10 +476,10 @@ def abandon_instances( def aggregated_list( self, - request: compute.AggregatedListInstanceGroupManagersRequest = None, + request: Union[compute.AggregatedListInstanceGroupManagersRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -473,7 +487,7 @@ def aggregated_list( groups them by zone. Args: - request (google.cloud.compute_v1.types.AggregatedListInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListInstanceGroupManagersRequest, dict]): The request object. A request message for InstanceGroupManagers.AggregatedList. See the method description for details. @@ -534,13 +548,15 @@ def aggregated_list( def apply_updates_to_instances( self, - request: compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest = None, + request: Union[ + compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_apply_updates_request_resource: compute.InstanceGroupManagersApplyUpdatesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -549,7 +565,7 @@ def apply_updates_to_instances( overrides and/or new versions. Args: - request (google.cloud.compute_v1.types.ApplyUpdatesToInstancesInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.ApplyUpdatesToInstancesInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.ApplyUpdatesToInstances. See the method description for details. 
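The mTLS toggle above (changed the same way for the images client earlier in this diff) replaces `distutils.util.strtobool` with an explicit check; a standalone sketch of the behavioural difference:

import os

def _use_client_cert() -> bool:
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    # Only the literal strings "true" / "false" are accepted now.
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return value == "true"

# distutils.util.strtobool also accepted "1", "y", "yes", "t", "on" (and their
# negative counterparts); with the new check those values raise instead of
# silently enabling or disabling mutual TLS.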
@@ -657,13 +673,13 @@ def apply_updates_to_instances( def create_instances( self, - request: compute.CreateInstancesInstanceGroupManagerRequest = None, + request: Union[compute.CreateInstancesInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_create_instances_request_resource: compute.InstanceGroupManagersCreateInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -677,7 +693,7 @@ def create_instances( method. Args: - request (google.cloud.compute_v1.types.CreateInstancesInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.CreateInstancesInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.CreateInstances. See the method description for details. @@ -779,12 +795,12 @@ def create_instances( def delete( self, - request: compute.DeleteInstanceGroupManagerRequest = None, + request: Union[compute.DeleteInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -794,7 +810,7 @@ def delete( Deleting an instance group for more information. Args: - request (google.cloud.compute_v1.types.DeleteInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.Delete. See the method description for details. @@ -879,13 +895,13 @@ def delete( def delete_instances( self, - request: compute.DeleteInstancesInstanceGroupManagerRequest = None, + request: Union[compute.DeleteInstancesInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_delete_instances_request_resource: compute.InstanceGroupManagersDeleteInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -906,7 +922,7 @@ def delete_instances( method per request. Args: - request (google.cloud.compute_v1.types.DeleteInstancesInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.DeleteInstancesInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.DeleteInstances. See the method description for details. @@ -1007,13 +1023,15 @@ def delete_instances( def delete_per_instance_configs( self, - request: compute.DeletePerInstanceConfigsInstanceGroupManagerRequest = None, + request: Union[ + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_delete_per_instance_configs_req_resource: compute.InstanceGroupManagersDeletePerInstanceConfigsReq = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1021,7 +1039,7 @@ def delete_per_instance_configs( instance group. 
Args: - request (google.cloud.compute_v1.types.DeletePerInstanceConfigsInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.DeletePerInstanceConfigsInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.DeletePerInstanceConfigs. See the method description for details. @@ -1132,12 +1150,12 @@ def delete_per_instance_configs( def get( self, - request: compute.GetInstanceGroupManagerRequest = None, + request: Union[compute.GetInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManager: @@ -1146,7 +1164,7 @@ def get( instance groups by making a list() request. Args: - request (google.cloud.compute_v1.types.GetInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.GetInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.Get. See the method description for details. @@ -1225,12 +1243,12 @@ def get( def insert( self, - request: compute.InsertInstanceGroupManagerRequest = None, + request: Union[compute.InsertInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager_resource: compute.InstanceGroupManager = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1247,7 +1265,7 @@ def insert( increase in this limit. Args: - request (google.cloud.compute_v1.types.InsertInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.InsertInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.Insert. See the method description for details. @@ -1332,11 +1350,11 @@ def insert( def list( self, - request: compute.ListInstanceGroupManagersRequest = None, + request: Union[compute.ListInstanceGroupManagersRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -1344,7 +1362,7 @@ def list( contained within the specified project and zone. Args: - request (google.cloud.compute_v1.types.ListInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListInstanceGroupManagersRequest, dict]): The request object. A request message for InstanceGroupManagers.List. See the method description for details. @@ -1415,12 +1433,12 @@ def list( def list_errors( self, - request: compute.ListErrorsInstanceGroupManagersRequest = None, + request: Union[compute.ListErrorsInstanceGroupManagersRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListErrorsPager: @@ -1429,7 +1447,7 @@ def list_errors( query parameters are not supported. Args: - request (google.cloud.compute_v1.types.ListErrorsInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListErrorsInstanceGroupManagersRequest, dict]): The request object. 
A request message for InstanceGroupManagers.ListErrors. See the method description for details. @@ -1511,12 +1529,14 @@ def list_errors( def list_managed_instances( self, - request: compute.ListManagedInstancesInstanceGroupManagersRequest = None, + request: Union[ + compute.ListManagedInstancesInstanceGroupManagersRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListManagedInstancesPager: @@ -1530,7 +1550,7 @@ def list_managed_instances( query parameter is not supported. Args: - request (google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest, dict]): The request object. A request message for InstanceGroupManagers.ListManagedInstances. See the method description for details. @@ -1611,12 +1631,14 @@ def list_managed_instances( def list_per_instance_configs( self, - request: compute.ListPerInstanceConfigsInstanceGroupManagersRequest = None, + request: Union[ + compute.ListPerInstanceConfigsInstanceGroupManagersRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPerInstanceConfigsPager: @@ -1625,7 +1647,7 @@ def list_per_instance_configs( not supported. Args: - request (google.cloud.compute_v1.types.ListPerInstanceConfigsInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListPerInstanceConfigsInstanceGroupManagersRequest, dict]): The request object. A request message for InstanceGroupManagers.ListPerInstanceConfigs. See the method description for details. @@ -1711,13 +1733,13 @@ def list_per_instance_configs( def patch( self, - request: compute.PatchInstanceGroupManagerRequest = None, + request: Union[compute.PatchInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_manager_resource: compute.InstanceGroupManager = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1737,7 +1759,7 @@ def patch( in a MIG, see Updating instances in a MIG. Args: - request (google.cloud.compute_v1.types.PatchInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.PatchInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.Patch. See the method description for details. 
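A hypothetical usage sketch for the list_* surfaces above; the `InstanceGroupManagersClient` export name and the resource names are assumptions, and the iteration mirrors the pager implementation shown later in this diff.

from google.cloud import compute_v1

igm_client = compute_v1.InstanceGroupManagersClient()  # assumed export name

pager = igm_client.list_managed_instances(
    project="my-project",
    zone="us-central1-a",
    instance_group_manager="my-mig",
)

# Item-by-item iteration follows next_page_token transparently.
for managed_instance in pager:
    print(managed_instance.instance)

# Alternatively, iterate page-by-page via `pager.pages`, where each page is an
# InstanceGroupManagersListManagedInstancesResponse with a `managed_instances` list.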
@@ -1833,13 +1855,15 @@ def patch( def patch_per_instance_configs( self, - request: compute.PatchPerInstanceConfigsInstanceGroupManagerRequest = None, + request: Union[ + compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_patch_per_instance_configs_req_resource: compute.InstanceGroupManagersPatchPerInstanceConfigsReq = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1849,7 +1873,7 @@ def patch_per_instance_configs( patch. Args: - request (google.cloud.compute_v1.types.PatchPerInstanceConfigsInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.PatchPerInstanceConfigsInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.PatchPerInstanceConfigs. See the method description for details. @@ -1960,13 +1984,15 @@ def patch_per_instance_configs( def recreate_instances( self, - request: compute.RecreateInstancesInstanceGroupManagerRequest = None, + request: Union[ + compute.RecreateInstancesInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_recreate_instances_request_resource: compute.InstanceGroupManagersRecreateInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1986,7 +2012,7 @@ def recreate_instances( method per request. Args: - request (google.cloud.compute_v1.types.RecreateInstancesInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.RecreateInstancesInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.RecreateInstances. See the method description for details. @@ -2089,13 +2115,13 @@ def recreate_instances( def resize( self, - request: compute.ResizeInstanceGroupManagerRequest = None, + request: Union[compute.ResizeInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, size: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2121,7 +2147,7 @@ def resize( or deleted. Args: - request (google.cloud.compute_v1.types.ResizeInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.ResizeInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.Resize. See the method description for details. 
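A hypothetical sketch of consuming the aggregated_list pager for these managers (see the pager typing later in this diff): iteration yields (scope, scoped_list) pairs rather than individual resources. The client export name and project value are assumptions.

from google.cloud import compute_v1

igm_client = compute_v1.InstanceGroupManagersClient()  # assumed export name

for scope, scoped_list in igm_client.aggregated_list(project="my-project"):
    # `scope` is a key such as "zones/us-central1-a"; `scoped_list` is an
    # InstanceGroupManagersScopedList whose `instance_group_managers` field
    # may be empty for scopes that contain no managers.
    for igm in scoped_list.instance_group_managers:
        print(scope, igm.name)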
@@ -2219,13 +2245,15 @@ def resize( def set_instance_template( self, - request: compute.SetInstanceTemplateInstanceGroupManagerRequest = None, + request: Union[ + compute.SetInstanceTemplateInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_set_instance_template_request_resource: compute.InstanceGroupManagersSetInstanceTemplateRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2236,7 +2264,7 @@ def set_instance_template( the group's updatePolicy.type to PROACTIVE. Args: - request (google.cloud.compute_v1.types.SetInstanceTemplateInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.SetInstanceTemplateInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.SetInstanceTemplate. See the method description for details. @@ -2342,13 +2370,13 @@ def set_instance_template( def set_target_pools( self, - request: compute.SetTargetPoolsInstanceGroupManagerRequest = None, + request: Union[compute.SetTargetPoolsInstanceGroupManagerRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_set_target_pools_request_resource: compute.InstanceGroupManagersSetTargetPoolsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2362,7 +2390,7 @@ def set_target_pools( group depending on the size of the group. Args: - request (google.cloud.compute_v1.types.SetTargetPoolsInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.SetTargetPoolsInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.SetTargetPools. See the method description for details. @@ -2463,13 +2491,15 @@ def set_target_pools( def update_per_instance_configs( self, - request: compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest = None, + request: Union[ + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, zone: str = None, instance_group_manager: str = None, instance_group_managers_update_per_instance_configs_req_resource: compute.InstanceGroupManagersUpdatePerInstanceConfigsReq = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2479,7 +2509,7 @@ def update_per_instance_configs( patch. Args: - request (google.cloud.compute_v1.types.UpdatePerInstanceConfigsInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.UpdatePerInstanceConfigsInstanceGroupManagerRequest, dict]): The request object. A request message for InstanceGroupManagers.UpdatePerInstanceConfigs. See the method description for details. @@ -2588,6 +2618,19 @@ def update_per_instance_configs( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/instance_group_managers/pagers.py b/google/cloud/compute_v1/services/instance_group_managers/pagers.py index 50bd2b911..42e386f83 100644 --- a/google/cloud/compute_v1/services/instance_group_managers/pagers.py +++ b/google/cloud/compute_v1/services/instance_group_managers/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceGroupManagerAggregatedList]: + def pages(self) -> Iterator[compute.InstanceGroupManagerAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.InstanceGroupManagersScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.InstanceGroupManagersScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceGroupManagerList]: + def pages(self) -> Iterator[compute.InstanceGroupManagerList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceGroupManager]: + def __iter__(self) -> Iterator[compute.InstanceGroupManager]: for page in self.pages: yield from page.items @@ -201,14 +201,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceGroupManagersListErrorsResponse]: + def pages(self) -> Iterator[compute.InstanceGroupManagersListErrorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceManagedByIgmError]: + def __iter__(self) -> Iterator[compute.InstanceManagedByIgmError]: for page in self.pages: yield from page.items @@ -269,14 +269,14 @@ def __getattr__(self, name: str) -> Any: @property def pages( self, - ) -> Iterable[compute.InstanceGroupManagersListManagedInstancesResponse]: + ) -> Iterator[compute.InstanceGroupManagersListManagedInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ManagedInstance]: + def __iter__(self) -> Iterator[compute.ManagedInstance]: for page in self.pages: yield from page.managed_instances @@ -335,14 +335,14 @@ def __getattr__(self, name: str) -> Any: @property def pages( self, - ) -> Iterable[compute.InstanceGroupManagersListPerInstanceConfigsResp]: + ) -> Iterator[compute.InstanceGroupManagersListPerInstanceConfigsResp]: yield self._response while self._response.next_page_token: self._request.page_token 
= self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.PerInstanceConfig]: + def __iter__(self) -> Iterator[compute.PerInstanceConfig]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/instance_group_managers/transports/base.py b/google/cloud/compute_v1/services/instance_group_managers/transports/base.py index d2868d78e..a78dc34ec 100644 --- a/google/cloud/compute_v1/services/instance_group_managers/transports/base.py +++ b/google/cloud/compute_v1/services/instance_group_managers/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InstanceGroupManagersTransport(abc.ABC): """Abstract transport class for InstanceGroupManagers.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -234,6 +198,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def abandon_instances( self, diff --git a/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py b/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py index 714ff3f4a..4f075b0a1 100644 --- a/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py +++ b/google/cloud/compute_v1/services/instance_group_managers/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
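The close() hook added to the base transport above is what the client's new __enter__/__exit__ methods call, so using the client as a context manager is only safe when the transport is not shared with other clients. A brief usage sketch with placeholder resource names:

from google.cloud import compute_v1

# Exiting the with block closes the underlying transport.
with compute_v1.InstanceGroupManagersClient() as client:
    igm = client.get(
        project="my-project",
        zone="us-central1-a",
        instance_group_manager="my-mig",
    )
    print(igm.name)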
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + InstanceGroupManagersTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import InstanceGroupManagersTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class InstanceGroupManagersRestTransport(InstanceGroupManagersTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def abandon_instances( + def _abandon_instances( self, request: compute.AbandonInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the abandon instances method over HTTP. @@ -113,6 +140,9 @@ def abandon_instances( InstanceGroupManagers.AbandonInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -136,33 +166,67 @@ def abandon_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/abandonInstances", + "body": "instance_group_managers_abandon_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AbandonInstancesInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersAbandonInstancesRequest.to_json( - request.instance_group_managers_abandon_instances_request_resource, + compute.InstanceGroupManagersAbandonInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/abandonInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AbandonInstancesInstanceGroupManagerRequest.to_json( + compute.AbandonInstancesInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AbandonInstancesInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -173,10 +237,12 @@ def abandon_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManagerAggregatedList: r"""Call the aggregated list method over HTTP. @@ -187,6 +253,9 @@ def aggregated_list( InstanceGroupManagers.AggregatedList. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -195,38 +264,56 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/instanceGroupManagers".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListInstanceGroupManagersRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.AggregatedListInstanceGroupManagersRequest.include_all_scopes - in request - ): - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListInstanceGroupManagersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListInstanceGroupManagersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListInstanceGroupManagersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/instanceGroupManagers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListInstanceGroupManagersRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListInstanceGroupManagersRequest.to_json( + compute.AggregatedListInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -238,10 +325,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def apply_updates_to_instances( + def _apply_updates_to_instances( self, request: compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the apply updates to @@ -253,6 +342,9 @@ def apply_updates_to_instances( InstanceGroupManagers.ApplyUpdatesToInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -276,31 +368,67 @@ def apply_updates_to_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances", + "body": "instance_group_managers_apply_updates_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersApplyUpdatesRequest.to_json( - request.instance_group_managers_apply_updates_request_resource, + compute.InstanceGroupManagersApplyUpdatesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest.to_json( + compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -311,10 +439,12 @@ def apply_updates_to_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def create_instances( + def _create_instances( self, request: compute.CreateInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the create instances method over HTTP. @@ -325,6 +455,9 @@ def create_instances( InstanceGroupManagers.CreateInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -348,33 +481,67 @@ def create_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/createInstances", + "body": "instance_group_managers_create_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.CreateInstancesInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersCreateInstancesRequest.to_json( - request.instance_group_managers_create_instances_request_resource, + compute.InstanceGroupManagersCreateInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/createInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.CreateInstancesInstanceGroupManagerRequest.to_json( + compute.CreateInstancesInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if 
compute.CreateInstancesInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -385,10 +552,12 @@ def create_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -399,6 +568,9 @@ def delete( InstanceGroupManagers.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -422,25 +594,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteInstanceGroupManagerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInstanceGroupManagerRequest.to_json( + compute.DeleteInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -450,10 +653,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_instances( + def _delete_instances( self, request: compute.DeleteInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete instances method over HTTP. @@ -464,6 +669,9 @@ def delete_instances( InstanceGroupManagers.DeleteInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -487,33 +695,67 @@ def delete_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deleteInstances", + "body": "instance_group_managers_delete_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteInstancesInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersDeleteInstancesRequest.to_json( - request.instance_group_managers_delete_instances_request_resource, + compute.InstanceGroupManagersDeleteInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deleteInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInstancesInstanceGroupManagerRequest.to_json( + compute.DeleteInstancesInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteInstancesInstanceGroupManagerRequest.request_id in request: - 
query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -524,10 +766,12 @@ def delete_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_per_instance_configs( + def _delete_per_instance_configs( self, request: compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete per instance @@ -539,6 +783,9 @@ def delete_per_instance_configs( InstanceGroupManagers.DeletePerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -562,31 +809,67 @@ def delete_per_instance_configs( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs", + "body": "instance_group_managers_delete_per_instance_configs_req_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersDeletePerInstanceConfigsReq.to_json( - request.instance_group_managers_delete_per_instance_configs_req_resource, + compute.InstanceGroupManagersDeletePerInstanceConfigsReq( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest.to_json( + compute.DeletePerInstanceConfigsInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -597,10 +880,12 @@ def delete_per_instance_configs( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManager: r"""Call the get method over HTTP. @@ -611,6 +896,9 @@ def get( InstanceGroupManagers.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -628,23 +916,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetInstanceGroupManagerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetInstanceGroupManagerRequest.to_json( + compute.GetInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -656,10 +977,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -670,6 +993,9 @@ def insert( InstanceGroupManagers.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
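The required-fields loop repeated in each method exists because to_json(..., including_default_value_fields=False) drops any field that still holds its default value, which would silently remove a required path or query field. A self-contained toy version of the same back-fill, with invented values:

required_fields = [
    # (snake_case_name, camel_case_name)
    ("instance_group_manager", "instanceGroupManager"),
    ("project", "project"),
    ("zone", "zone"),
]

# Stand-ins for transcoded_request["query_params"] and for the serialized
# query params after a default-valued field was dropped.
orig_query_params = {
    "project": "my-project",
    "zone": "us-central1-a",
    "instance_group_manager": "",
}
query_params = {"project": "my-project", "zone": "us-central1-a"}

# Same loop as in the generated methods: restore any required field that
# serialization dropped.
for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params["instanceGroupManager"] == ""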
@@ -693,30 +1019,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers", + "body": "instance_group_manager_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertInstanceGroupManagerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManager.to_json( - request.instance_group_manager_resource, + compute.InstanceGroupManager(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertInstanceGroupManagerRequest.to_json( + compute.InsertInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -727,10 +1085,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManagerList: r"""Call the list method over HTTP. @@ -741,6 +1101,9 @@ def list( InstanceGroupManagers.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -749,30 +1112,55 @@ def list( [Output Only] A list of managed instance groups. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers".format( - host=self._host, project=request.project, zone=request.zone, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInstanceGroupManagersRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInstanceGroupManagersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInstanceGroupManagersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInstanceGroupManagersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInstanceGroupManagersRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListInstanceGroupManagersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInstanceGroupManagersRequest.to_json( + compute.ListInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -784,10 +1172,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_errors( + def _list_errors( self, request: compute.ListErrorsInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManagersListErrorsResponse: r"""Call the list errors method over HTTP. @@ -798,6 +1188,9 @@ def list_errors( InstanceGroupManagers.ListErrors. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -806,36 +1199,56 @@ def list_errors( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listErrors".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListErrorsInstanceGroupManagersRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListErrorsInstanceGroupManagersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListErrorsInstanceGroupManagersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListErrorsInstanceGroupManagersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListErrorsInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listErrors", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListErrorsInstanceGroupManagersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListErrorsInstanceGroupManagersRequest.to_json( + compute.ListErrorsInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -847,10 +1260,12 @@ def list_errors( response.content, ignore_unknown_fields=True ) - def list_managed_instances( + def _list_managed_instances( self, request: compute.ListManagedInstancesInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManagersListManagedInstancesResponse: r"""Call the list managed instances method over HTTP. 
@@ -861,6 +1276,9 @@ def list_managed_instances( InstanceGroupManagers.ListManagedInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -869,42 +1287,58 @@ def list_managed_instances( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listManagedInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListManagedInstancesInstanceGroupManagersRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.ListManagedInstancesInstanceGroupManagersRequest.max_results - in request - ): - query_params["maxResults"] = request.max_results - if compute.ListManagedInstancesInstanceGroupManagersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if ( - compute.ListManagedInstancesInstanceGroupManagersRequest.page_token - in request - ): - query_params["pageToken"] = request.page_token - if ( - compute.ListManagedInstancesInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listManagedInstances", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListManagedInstancesInstanceGroupManagersRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListManagedInstancesInstanceGroupManagersRequest.to_json( + compute.ListManagedInstancesInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
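On the caller's side, the pager returned by list_managed_instances hides this HTTP paging loop: iterating the pager yields ManagedInstance objects across pages, and its .pages property yields the raw per-page responses. A brief usage sketch with placeholder names:

from google.cloud import compute_v1

client = compute_v1.InstanceGroupManagersClient()

pager = client.list_managed_instances(
    project="my-project",
    zone="us-central1-a",
    instance_group_manager="my-mig",
)

# Walk every managed instance, transparently fetching the next pages.
for managed_instance in pager:
    print(managed_instance.instance, managed_instance.current_action)

# Or inspect each InstanceGroupManagersListManagedInstancesResponse directly.
for page in pager.pages:
    print(len(page.managed_instances))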
@@ -916,10 +1350,12 @@ def list_managed_instances( response.content, ignore_unknown_fields=True ) - def list_per_instance_configs( + def _list_per_instance_configs( self, request: compute.ListPerInstanceConfigsInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManagersListPerInstanceConfigsResp: r"""Call the list per instance configs method over HTTP. @@ -930,6 +1366,9 @@ def list_per_instance_configs( InstanceGroupManagers.ListPerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -938,45 +1377,58 @@ def list_per_instance_configs( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListPerInstanceConfigsInstanceGroupManagersRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.ListPerInstanceConfigsInstanceGroupManagersRequest.max_results - in request - ): - query_params["maxResults"] = request.max_results - if ( - compute.ListPerInstanceConfigsInstanceGroupManagersRequest.order_by - in request - ): - query_params["orderBy"] = request.order_by - if ( - compute.ListPerInstanceConfigsInstanceGroupManagersRequest.page_token - in request - ): - query_params["pageToken"] = request.page_token - if ( - compute.ListPerInstanceConfigsInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListPerInstanceConfigsInstanceGroupManagersRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListPerInstanceConfigsInstanceGroupManagersRequest.to_json( + compute.ListPerInstanceConfigsInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -988,10 +1440,12 @@ def list_per_instance_configs( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -1002,6 +1456,9 @@ def patch( InstanceGroupManagers.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1025,33 +1482,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}", + "body": "instance_group_manager_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.PatchInstanceGroupManagerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManager.to_json( - request.instance_group_manager_resource, + compute.InstanceGroupManager(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchInstanceGroupManagerRequest.to_json( + compute.PatchInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1062,10 +1549,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def patch_per_instance_configs( + def _patch_per_instance_configs( self, request: compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch per instance @@ -1077,6 +1566,9 @@ def patch_per_instance_configs( InstanceGroupManagers.PatchPerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1100,36 +1592,67 @@ def patch_per_instance_configs( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs", + "body": "instance_group_managers_patch_per_instance_configs_req_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersPatchPerInstanceConfigsReq.to_json( - request.instance_group_managers_patch_per_instance_configs_req_resource, + compute.InstanceGroupManagersPatchPerInstanceConfigsReq( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.to_json( + compute.PatchPerInstanceConfigsInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not 
required for GCE - query_params = {} - if ( - compute.PatchPerInstanceConfigsInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1140,10 +1663,12 @@ def patch_per_instance_configs( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def recreate_instances( + def _recreate_instances( self, request: compute.RecreateInstancesInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the recreate instances method over HTTP. @@ -1154,6 +1679,9 @@ def recreate_instances( InstanceGroupManagers.RecreateInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
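A rough sketch of the dynamic dispatch these handlers now share: the HTTP verb is read from the transcoded request and looked up on the session with getattr, and nested query params are flattened by google.api_core.rest_helpers before being handed to requests. The session, verb and params below are placeholders; no request is actually sent.

import requests
from google.api_core import rest_helpers

session = requests.Session()             # stands in for the transport's AuthorizedSession
method = "post"                          # transcoded_request["method"]
send = getattr(session, method)          # bound session.post, picked at runtime
params = rest_helpers.flatten_query_params({"requestId": "12345"})

print(send)    # <bound method Session.post ...>
print(params)  # query-string-ready key/value pairs, e.g. [('requestId', '12345')]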
@@ -1177,33 +1705,67 @@ def recreate_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/recreateInstances", + "body": "instance_group_managers_recreate_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.RecreateInstancesInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersRecreateInstancesRequest.to_json( - request.instance_group_managers_recreate_instances_request_resource, + compute.InstanceGroupManagersRecreateInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/recreateInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RecreateInstancesInstanceGroupManagerRequest.to_json( + compute.RecreateInstancesInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RecreateInstancesInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1214,10 +1776,12 @@ def recreate_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def resize( + def _resize( self, request: compute.ResizeInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the resize method over HTTP. @@ -1228,6 +1792,9 @@ def resize( InstanceGroupManagers.Resize. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1251,26 +1818,57 @@ def resize( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resize".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resize", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("size", "size"), + ("zone", "zone"), + ] + + request_kwargs = compute.ResizeInstanceGroupManagerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ResizeInstanceGroupManagerRequest.to_json( + compute.ResizeInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ResizeInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id - query_params["size"] = request.size + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1280,10 +1878,12 @@ def resize( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_instance_template( + def _set_instance_template( self, request: compute.SetInstanceTemplateInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set instance template method over HTTP. @@ -1294,6 +1894,9 @@ def set_instance_template( InstanceGroupManagers.SetInstanceTemplate. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1317,33 +1920,67 @@ def set_instance_template( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate", + "body": "instance_group_managers_set_instance_template_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetInstanceTemplateInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersSetInstanceTemplateRequest.to_json( - request.instance_group_managers_set_instance_template_request_resource, + compute.InstanceGroupManagersSetInstanceTemplateRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetInstanceTemplateInstanceGroupManagerRequest.to_json( + compute.SetInstanceTemplateInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetInstanceTemplateInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1354,10 +1991,12 @@ def set_instance_template( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_target_pools( + def _set_target_pools( self, request: compute.SetTargetPoolsInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set target pools method over HTTP. @@ -1368,6 +2007,9 @@ def set_target_pools( InstanceGroupManagers.SetTargetPools. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1391,33 +2033,67 @@ def set_target_pools( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setTargetPools", + "body": "instance_group_managers_set_target_pools_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetTargetPoolsInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersSetTargetPoolsRequest.to_json( - request.instance_group_managers_set_target_pools_request_resource, + compute.InstanceGroupManagersSetTargetPoolsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setTargetPools".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetTargetPoolsInstanceGroupManagerRequest.to_json( + compute.SetTargetPoolsInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if 
compute.SetTargetPoolsInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1428,10 +2104,12 @@ def set_target_pools( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update_per_instance_configs( + def _update_per_instance_configs( self, request: compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update per instance @@ -1443,6 +2121,9 @@ def update_per_instance_configs( InstanceGroupManagers.UpdatePerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
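The "ensure required fields" loop repeated in each handler exists because proto-plus to_json with including_default_value_fields=False silently drops fields that sit at their default value. A small self-contained illustration of the backfill, using the resize handler's required size field with hypothetical values:

required_fields = [("size", "size")]     # (snake_case_name, camel_case_name)
orig_query_params = {"size": 0}          # transcoded_request["query_params"]: default kept
query_params = {}                        # json.loads(...to_json(...)) dropped size == 0

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"size": 0}       # the required param is restored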
@@ -1466,36 +2147,67 @@ def update_per_instance_configs( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs", + "body": "instance_group_managers_update_per_instance_configs_req_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManagersUpdatePerInstanceConfigsReq.to_json( - request.instance_group_managers_update_per_instance_configs_req_resource, + compute.InstanceGroupManagersUpdatePerInstanceConfigsReq( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.to_json( + compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1506,5 +2218,164 @@ def update_per_instance_configs( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def abandon_instances( + self, + ) -> Callable[ + [compute.AbandonInstancesInstanceGroupManagerRequest], compute.Operation + ]: + return self._abandon_instances + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListInstanceGroupManagersRequest], + compute.InstanceGroupManagerAggregatedList, + ]: + return self._aggregated_list + + @property + def apply_updates_to_instances( + self, + ) -> Callable[ + [compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest], compute.Operation + ]: + return self._apply_updates_to_instances + + @property + def create_instances( + self, + ) -> Callable[ + [compute.CreateInstancesInstanceGroupManagerRequest], compute.Operation + ]: + return self._create_instances + + @property + def delete( + self, + ) -> Callable[[compute.DeleteInstanceGroupManagerRequest], compute.Operation]: + return self._delete + + @property + def delete_instances( + self, + ) -> Callable[ + [compute.DeleteInstancesInstanceGroupManagerRequest], compute.Operation + ]: + return self._delete_instances + + @property + def delete_per_instance_configs( + self, + ) -> Callable[ + [compute.DeletePerInstanceConfigsInstanceGroupManagerRequest], compute.Operation + ]: + return self._delete_per_instance_configs + + @property + def get( + self, + ) -> Callable[ + [compute.GetInstanceGroupManagerRequest], compute.InstanceGroupManager + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertInstanceGroupManagerRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListInstanceGroupManagersRequest], compute.InstanceGroupManagerList + ]: + return self._list + + @property + def list_errors( + self, + ) -> Callable[ + [compute.ListErrorsInstanceGroupManagersRequest], + compute.InstanceGroupManagersListErrorsResponse, + ]: + return self._list_errors + + @property + def list_managed_instances( + self, + ) -> Callable[ + [compute.ListManagedInstancesInstanceGroupManagersRequest], + compute.InstanceGroupManagersListManagedInstancesResponse, + ]: + return self._list_managed_instances + + @property + def list_per_instance_configs( + self, + ) -> Callable[ + [compute.ListPerInstanceConfigsInstanceGroupManagersRequest], + compute.InstanceGroupManagersListPerInstanceConfigsResp, + ]: + return self._list_per_instance_configs + + @property + def patch( + self, + ) -> Callable[[compute.PatchInstanceGroupManagerRequest], compute.Operation]: + return self._patch + + @property + def patch_per_instance_configs( + self, + ) -> 
Callable[ + [compute.PatchPerInstanceConfigsInstanceGroupManagerRequest], compute.Operation + ]: + return self._patch_per_instance_configs + + @property + def recreate_instances( + self, + ) -> Callable[ + [compute.RecreateInstancesInstanceGroupManagerRequest], compute.Operation + ]: + return self._recreate_instances + + @property + def resize( + self, + ) -> Callable[[compute.ResizeInstanceGroupManagerRequest], compute.Operation]: + return self._resize + + @property + def set_instance_template( + self, + ) -> Callable[ + [compute.SetInstanceTemplateInstanceGroupManagerRequest], compute.Operation + ]: + return self._set_instance_template + + @property + def set_target_pools( + self, + ) -> Callable[ + [compute.SetTargetPoolsInstanceGroupManagerRequest], compute.Operation + ]: + return self._set_target_pools + + @property + def update_per_instance_configs( + self, + ) -> Callable[ + [compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest], compute.Operation + ]: + return self._update_per_instance_configs + + def close(self): + self._session.close() + __all__ = ("InstanceGroupManagersRestTransport",) diff --git a/google/cloud/compute_v1/services/instance_groups/client.py b/google/cloud/compute_v1/services/instance_groups/client.py index e57d98434..173315564 100644 --- a/google/cloud/compute_v1/services/instance_groups/client.py +++ b/google/cloud/compute_v1/services/instance_groups/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.instance_groups import pagers from google.cloud.compute_v1.types import compute from .transports.base import InstanceGroupsTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,17 +337,18 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_instances( self, - request: compute.AddInstancesInstanceGroupRequest = None, + request: Union[compute.AddInstancesInstanceGroupRequest, dict] = None, *, project: str = None, zone: str = None, instance_group: str = None, instance_groups_add_instances_request_resource: compute.InstanceGroupsAddInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -346,7 +358,7 @@ def add_instances( for more information. Args: - request (google.cloud.compute_v1.types.AddInstancesInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.AddInstancesInstanceGroupRequest, dict]): The request object. A request message for InstanceGroups.AddInstances. See the method description for details. @@ -447,10 +459,10 @@ def add_instances( def aggregated_list( self, - request: compute.AggregatedListInstanceGroupsRequest = None, + request: Union[compute.AggregatedListInstanceGroupsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -458,7 +470,7 @@ def aggregated_list( by zone. Args: - request (google.cloud.compute_v1.types.AggregatedListInstanceGroupsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListInstanceGroupsRequest, dict]): The request object. A request message for InstanceGroups.AggregatedList. See the method description for details. @@ -519,12 +531,12 @@ def aggregated_list( def delete( self, - request: compute.DeleteInstanceGroupRequest = None, + request: Union[compute.DeleteInstanceGroupRequest, dict] = None, *, project: str = None, zone: str = None, instance_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -534,7 +546,7 @@ def delete( instance group for more information. Args: - request (google.cloud.compute_v1.types.DeleteInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.DeleteInstanceGroupRequest, dict]): The request object. A request message for InstanceGroups.Delete. See the method description for details. @@ -619,12 +631,12 @@ def delete( def get( self, - request: compute.GetInstanceGroupRequest = None, + request: Union[compute.GetInstanceGroupRequest, dict] = None, *, project: str = None, zone: str = None, instance_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroup: @@ -635,7 +647,7 @@ def get( methods instead. 
Args: - request (google.cloud.compute_v1.types.GetInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.GetInstanceGroupRequest, dict]): The request object. A request message for InstanceGroups.Get. See the method description for details. @@ -717,12 +729,12 @@ def get( def insert( self, - request: compute.InsertInstanceGroupRequest = None, + request: Union[compute.InsertInstanceGroupRequest, dict] = None, *, project: str = None, zone: str = None, instance_group_resource: compute.InstanceGroup = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -730,7 +742,7 @@ def insert( using the parameters that are included in the request. Args: - request (google.cloud.compute_v1.types.InsertInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.InsertInstanceGroupRequest, dict]): The request object. A request message for InstanceGroups.Insert. See the method description for details. @@ -813,11 +825,11 @@ def insert( def list( self, - request: compute.ListInstanceGroupsRequest = None, + request: Union[compute.ListInstanceGroupsRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -827,7 +839,7 @@ def list( regionInstanceGroupManagers methods instead. Args: - request (google.cloud.compute_v1.types.ListInstanceGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListInstanceGroupsRequest, dict]): The request object. A request message for InstanceGroups.List. See the method description for details. @@ -898,13 +910,13 @@ def list( def list_instances( self, - request: compute.ListInstancesInstanceGroupsRequest = None, + request: Union[compute.ListInstancesInstanceGroupsRequest, dict] = None, *, project: str = None, zone: str = None, instance_group: str = None, instance_groups_list_instances_request_resource: compute.InstanceGroupsListInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: @@ -912,7 +924,7 @@ def list_instances( The orderBy query parameter is not supported. Args: - request (google.cloud.compute_v1.types.ListInstancesInstanceGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListInstancesInstanceGroupsRequest, dict]): The request object. A request message for InstanceGroups.ListInstances. See the method description for details. @@ -1008,13 +1020,13 @@ def list_instances( def remove_instances( self, - request: compute.RemoveInstancesInstanceGroupRequest = None, + request: Union[compute.RemoveInstancesInstanceGroupRequest, dict] = None, *, project: str = None, zone: str = None, instance_group: str = None, instance_groups_remove_instances_request_resource: compute.InstanceGroupsRemoveInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1026,7 +1038,7 @@ def remove_instances( is removed or deleted. 
Args: - request (google.cloud.compute_v1.types.RemoveInstancesInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.RemoveInstancesInstanceGroupRequest, dict]): The request object. A request message for InstanceGroups.RemoveInstances. See the method description for details. @@ -1127,13 +1139,13 @@ def remove_instances( def set_named_ports( self, - request: compute.SetNamedPortsInstanceGroupRequest = None, + request: Union[compute.SetNamedPortsInstanceGroupRequest, dict] = None, *, project: str = None, zone: str = None, instance_group: str = None, instance_groups_set_named_ports_request_resource: compute.InstanceGroupsSetNamedPortsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1141,7 +1153,7 @@ def set_named_ports( group. Args: - request (google.cloud.compute_v1.types.SetNamedPortsInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.SetNamedPortsInstanceGroupRequest, dict]): The request object. A request message for InstanceGroups.SetNamedPorts. See the method description for details. @@ -1240,6 +1252,19 @@ def set_named_ports( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/instance_groups/pagers.py b/google/cloud/compute_v1/services/instance_groups/pagers.py index 6c24e583c..70a25629a 100644 --- a/google/cloud/compute_v1/services/instance_groups/pagers.py +++ b/google/cloud/compute_v1/services/instance_groups/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceGroupAggregatedList]: + def pages(self) -> Iterator[compute.InstanceGroupAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.InstanceGroupsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.InstanceGroupsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceGroupList]: + def pages(self) -> Iterator[compute.InstanceGroupList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceGroup]: + def __iter__(self) -> Iterator[compute.InstanceGroup]: for page in self.pages: yield from page.items @@ -201,14 +201,14 @@ def __getattr__(self, name: str) -> Any: return 
getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceGroupsListInstances]: + def pages(self) -> Iterator[compute.InstanceGroupsListInstances]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceWithNamedPorts]: + def __iter__(self) -> Iterator[compute.InstanceWithNamedPorts]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/instance_groups/transports/base.py b/google/cloud/compute_v1/services/instance_groups/transports/base.py index 3a7c9f07d..9451610e4 100644 --- a/google/cloud/compute_v1/services/instance_groups/transports/base.py +++ b/google/cloud/compute_v1/services/instance_groups/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InstanceGroupsTransport(abc.ABC): """Abstract transport class for InstanceGroups.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. 
- - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -187,6 +151,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_instances( self, diff --git a/google/cloud/compute_v1/services/instance_groups/transports/rest.py b/google/cloud/compute_v1/services/instance_groups/transports/rest.py index 23d9d33cc..ff8abb824 100644 --- a/google/cloud/compute_v1/services/instance_groups/transports/rest.py +++ b/google/cloud/compute_v1/services/instance_groups/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + InstanceGroupsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import InstanceGroupsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class InstanceGroupsRestTransport(InstanceGroupsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_instances( + def _add_instances( self, request: compute.AddInstancesInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add instances method over HTTP. @@ -112,6 +139,9 @@ def add_instances( InstanceGroups.AddInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
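A hedged sketch of the new url_scheme constructor argument on the REST transport, e.g. for pointing at a local test server. The endpoint and credentials are illustrative placeholders, and note that the request-sending code in these handlers still hard-codes "https://" when building the URL.

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.instance_groups.transports.rest import (
    InstanceGroupsRestTransport,
)

transport = InstanceGroupsRestTransport(
    host="localhost:8080",                               # illustrative local endpoint
    credentials=ga_credentials.AnonymousCredentials(),   # placeholder credentials
    url_scheme="http",                                   # accepted by the constructor
)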
@@ -135,33 +165,63 @@ def add_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/addInstances", + "body": "instance_groups_add_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AddInstancesInstanceGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupsAddInstancesRequest.to_json( - request.instance_groups_add_instances_request_resource, + compute.InstanceGroupsAddInstancesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/addInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group=request.instance_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddInstancesInstanceGroupRequest.to_json( + compute.AddInstancesInstanceGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddInstancesInstanceGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -172,10 +232,12 @@ def add_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListInstanceGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupAggregatedList: r"""Call the aggregated list method over HTTP. @@ -186,6 +248,9 @@ def aggregated_list( InstanceGroups.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -194,35 +259,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/instanceGroups".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/instanceGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListInstanceGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListInstanceGroupsRequest.to_json( + compute.AggregatedListInstanceGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListInstanceGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListInstanceGroupsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListInstanceGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListInstanceGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListInstanceGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListInstanceGroupsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -234,10 +318,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -248,6 +334,9 @@ def delete( InstanceGroups.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -271,25 +360,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group=request.instance_group, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteInstanceGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInstanceGroupRequest.to_json( + compute.DeleteInstanceGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteInstanceGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -299,10 +417,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroup: r"""Call the get method over HTTP. @@ -313,6 +433,9 @@ def get( InstanceGroups.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
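The add_instances, aggregated_list, and delete hunks above (and the remaining methods below) all follow the same transcoding flow. A self-contained sketch of what google.api_core.path_template.transcode returns for the addInstances mapping, with made-up project/zone/group values:

from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/addInstances",
        "body": "instance_groups_add_instances_request_resource",
    },
]

request_kwargs = {
    "project": "example-project",
    "zone": "us-central1-a",
    "instance_group": "example-group",
    "request_id": "request-1234",
    "instance_groups_add_instances_request_resource": {"instances": []},
}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       -> "post"
# transcoded["uri"]          -> "/compute/v1/projects/example-project/zones/us-central1-a/instanceGroups/example-group/addInstances"
# transcoded["body"]         -> {"instances": []} (the field named by "body")
# transcoded["query_params"] -> {"request_id": "request-1234"}, i.e. whatever
#                               was not consumed by the URI template or body.
print(transcoded["uri"])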
@@ -335,23 +458,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group=request.instance_group, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetInstanceGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetInstanceGroupRequest.to_json( + compute.GetInstanceGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -363,10 +517,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -377,6 +533,9 @@ def insert( InstanceGroups.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
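Once the query params have been JSON-ified, rest_helpers.flatten_query_params converts the dict into the parameter pairs handed to requests; a small sketch with made-up values (exact value formatting is up to google-api-core):

from google.api_core import rest_helpers

query_params = {"maxResults": 50, "pageToken": "token-1", "returnPartialSuccess": True}
flattened = rest_helpers.flatten_query_params(query_params)
# A flat list of (name, value) pairs, e.g.
# [("maxResults", 50), ("pageToken", "token-1"), ("returnPartialSuccess", True)]
print(flattened)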
@@ -400,30 +559,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups", + "body": "instance_group_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertInstanceGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroup.to_json( - request.instance_group_resource, + compute.InstanceGroup(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertInstanceGroupRequest.to_json( + compute.InsertInstanceGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertInstanceGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -434,10 +623,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListInstanceGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupList: r"""Call the list method over HTTP. @@ -448,6 +639,9 @@ def list( InstanceGroups.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -456,30 +650,53 @@ def list( A list of InstanceGroup resources. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListInstanceGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInstanceGroupsRequest.to_json( + compute.ListInstanceGroupsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInstanceGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInstanceGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInstanceGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInstanceGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInstanceGroupsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -491,10 +708,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_instances( + def _list_instances( self, request: compute.ListInstancesInstanceGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupsListInstances: r"""Call the list instances method over HTTP. @@ -505,6 +724,9 @@ def list_instances( InstanceGroups.ListInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -513,41 +735,63 @@ def list_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/listInstances", + "body": "instance_groups_list_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListInstancesInstanceGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupsListInstancesRequest.to_json( - request.instance_groups_list_instances_request_resource, + compute.InstanceGroupsListInstancesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/listInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group=request.instance_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInstancesInstanceGroupsRequest.to_json( + compute.ListInstancesInstanceGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInstancesInstanceGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInstancesInstanceGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInstancesInstanceGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInstancesInstanceGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInstancesInstanceGroupsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -560,10 +804,12 @@ def list_instances( response.content, ignore_unknown_fields=True ) - def remove_instances( + def _remove_instances( self, request: compute.RemoveInstancesInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove instances method over HTTP. @@ -574,6 +820,9 @@ def remove_instances( InstanceGroups.RemoveInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -597,33 +846,63 @@ def remove_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/removeInstances", + "body": "instance_groups_remove_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.RemoveInstancesInstanceGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupsRemoveInstancesRequest.to_json( - request.instance_groups_remove_instances_request_resource, + compute.InstanceGroupsRemoveInstancesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/removeInstances".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group=request.instance_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveInstancesInstanceGroupRequest.to_json( + compute.RemoveInstancesInstanceGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveInstancesInstanceGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -634,10 +913,12 @@ def remove_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_named_ports( + def _set_named_ports( self, request: compute.SetNamedPortsInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set named ports method over HTTP. @@ -648,6 +929,9 @@ def set_named_ports( InstanceGroups.SetNamedPorts. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -671,33 +955,63 @@ def set_named_ports( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/setNamedPorts", + "body": "instance_groups_set_named_ports_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetNamedPortsInstanceGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupsSetNamedPortsRequest.to_json( - request.instance_groups_set_named_ports_request_resource, + compute.InstanceGroupsSetNamedPortsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/setNamedPorts".format( - host=self._host, - project=request.project, - zone=request.zone, - instance_group=request.instance_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetNamedPortsInstanceGroupRequest.to_json( + compute.SetNamedPortsInstanceGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetNamedPortsInstanceGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure 
required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -708,5 +1022,66 @@ def set_named_ports( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def add_instances( + self, + ) -> Callable[[compute.AddInstancesInstanceGroupRequest], compute.Operation]: + return self._add_instances + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListInstanceGroupsRequest], + compute.InstanceGroupAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteInstanceGroupRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetInstanceGroupRequest], compute.InstanceGroup]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertInstanceGroupRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListInstanceGroupsRequest], compute.InstanceGroupList]: + return self._list + + @property + def list_instances( + self, + ) -> Callable[ + [compute.ListInstancesInstanceGroupsRequest], + compute.InstanceGroupsListInstances, + ]: + return self._list_instances + + @property + def remove_instances( + self, + ) -> Callable[[compute.RemoveInstancesInstanceGroupRequest], compute.Operation]: + return self._remove_instances + + @property + def set_named_ports( + self, + ) -> Callable[[compute.SetNamedPortsInstanceGroupRequest], compute.Operation]: + return self._set_named_ports + + def close(self): + self._session.close() + __all__ = ("InstanceGroupsRestTransport",) diff --git a/google/cloud/compute_v1/services/instance_templates/client.py b/google/cloud/compute_v1/services/instance_templates/client.py index d1f46e8f8..7a9b2995b 100644 --- a/google/cloud/compute_v1/services/instance_templates/client.py +++ b/google/cloud/compute_v1/services/instance_templates/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.instance_templates import pagers from google.cloud.compute_v1.types import compute from .transports.base import InstanceTemplatesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,15 +339,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteInstanceTemplateRequest = None, + request: Union[compute.DeleteInstanceTemplateRequest, dict] = None, *, project: str = None, instance_template: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -346,7 +358,7 @@ def delete( use by a managed instance group. Args: - request (google.cloud.compute_v1.types.DeleteInstanceTemplateRequest): + request (Union[google.cloud.compute_v1.types.DeleteInstanceTemplateRequest, dict]): The request object. A request message for InstanceTemplates.Delete. See the method description for details. @@ -422,11 +434,11 @@ def delete( def get( self, - request: compute.GetInstanceTemplateRequest = None, + request: Union[compute.GetInstanceTemplateRequest, dict] = None, *, project: str = None, instance_template: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceTemplate: @@ -435,7 +447,7 @@ def get( request. 
Args: - request (google.cloud.compute_v1.types.GetInstanceTemplateRequest): + request (Union[google.cloud.compute_v1.types.GetInstanceTemplateRequest, dict]): The request object. A request message for InstanceTemplates.Get. See the method description for details. @@ -499,11 +511,11 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyInstanceTemplateRequest = None, + request: Union[compute.GetIamPolicyInstanceTemplateRequest, dict] = None, *, project: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -511,7 +523,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyInstanceTemplateRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyInstanceTemplateRequest, dict]): The request object. A request message for InstanceTemplates.GetIamPolicy. See the method description for details. @@ -611,11 +623,11 @@ def get_iam_policy( def insert( self, - request: compute.InsertInstanceTemplateRequest = None, + request: Union[compute.InsertInstanceTemplateRequest, dict] = None, *, project: str = None, instance_template_resource: compute.InstanceTemplate = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -627,7 +639,7 @@ def insert( the original template. Args: - request (google.cloud.compute_v1.types.InsertInstanceTemplateRequest): + request (Union[google.cloud.compute_v1.types.InsertInstanceTemplateRequest, dict]): The request object. A request message for InstanceTemplates.Insert. See the method description for details. @@ -701,10 +713,10 @@ def insert( def list( self, - request: compute.ListInstanceTemplatesRequest = None, + request: Union[compute.ListInstanceTemplatesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -712,7 +724,7 @@ def list( contained within the specified project. Args: - request (google.cloud.compute_v1.types.ListInstanceTemplatesRequest): + request (Union[google.cloud.compute_v1.types.ListInstanceTemplatesRequest, dict]): The request object. A request message for InstanceTemplates.List. See the method description for details. @@ -774,12 +786,12 @@ def list( def set_iam_policy( self, - request: compute.SetIamPolicyInstanceTemplateRequest = None, + request: Union[compute.SetIamPolicyInstanceTemplateRequest, dict] = None, *, project: str = None, resource: str = None, global_set_policy_request_resource: compute.GlobalSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -787,7 +799,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyInstanceTemplateRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyInstanceTemplateRequest, dict]): The request object. A request message for InstanceTemplates.SetIamPolicy. See the method description for details. 
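In isolation, the GOOGLE_API_USE_CLIENT_CERTIFICATE handling that replaces distutils.util.strtobool in the client constructors behaves roughly as follows (the environment value is set here only for illustration):

import os

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "false"  # example value

value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
if value not in ("true", "false"):
    # strtobool also accepted "1", "yes", "t", etc.; the new check only
    # allows the two documented spellings and fails loudly otherwise.
    raise ValueError(
        "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
        "either `true` or `false`"
    )
use_client_cert = value == "true"
print(use_client_cert)  # False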
@@ -898,12 +910,12 @@ def set_iam_policy( def test_iam_permissions( self, - request: compute.TestIamPermissionsInstanceTemplateRequest = None, + request: Union[compute.TestIamPermissionsInstanceTemplateRequest, dict] = None, *, project: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -911,7 +923,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsInstanceTemplateRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsInstanceTemplateRequest, dict]): The request object. A request message for InstanceTemplates.TestIamPermissions. See the method description for details. @@ -981,6 +993,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/instance_templates/pagers.py b/google/cloud/compute_v1/services/instance_templates/pagers.py index 1813c70b2..2f6054229 100644 --- a/google/cloud/compute_v1/services/instance_templates/pagers.py +++ b/google/cloud/compute_v1/services/instance_templates/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceTemplateList]: + def pages(self) -> Iterator[compute.InstanceTemplateList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceTemplate]: + def __iter__(self) -> Iterator[compute.InstanceTemplate]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/instance_templates/transports/base.py b/google/cloud/compute_v1/services/instance_templates/transports/base.py index 243a5e981..bc77ae8aa 100644 --- a/google/cloud/compute_v1/services/instance_templates/transports/base.py +++ b/google/cloud/compute_v1/services/instance_templates/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import 
credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InstanceTemplatesTransport(abc.ABC): """Abstract transport class for InstanceTemplates.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -183,6 +147,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/instance_templates/transports/rest.py b/google/cloud/compute_v1/services/instance_templates/transports/rest.py index 3d255efaf..676f83988 100644 --- a/google/cloud/compute_v1/services/instance_templates/transports/rest.py +++ b/google/cloud/compute_v1/services/instance_templates/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + InstanceTemplatesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import InstanceTemplatesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class InstanceTemplatesRestTransport(InstanceTemplatesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteInstanceTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( InstanceTemplates.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,24 +165,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}".format( - host=self._host, - project=request.project, - instance_template=request.instance_template, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_template", "instanceTemplate"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteInstanceTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInstanceTemplateRequest.to_json( + compute.DeleteInstanceTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteInstanceTemplateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -162,10 +223,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetInstanceTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceTemplate: r"""Call the get method over HTTP. @@ -176,6 +239,9 @@ def get( InstanceTemplates.Get. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -189,22 +255,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}".format( - host=self._host, - project=request.project, - instance_template=request.instance_template, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_template", "instanceTemplate"), + ("project", "project"), + ] + + request_kwargs = compute.GetInstanceTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetInstanceTemplateRequest.to_json( + compute.GetInstanceTemplateRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -216,10 +313,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyInstanceTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -230,6 +329,9 @@ def get_iam_policy( InstanceTemplates.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -278,27 +380,55 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/instanceTemplates/{resource}/getIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/instanceTemplates/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyInstanceTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyInstanceTemplateRequest.to_json( + compute.GetIamPolicyInstanceTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyInstanceTemplateRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -308,10 +438,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertInstanceTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -322,6 +454,9 @@ def insert( InstanceTemplates.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
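Because the public client methods now accept Union[<RequestMessage>, dict], a plain dict can stand in for the request message; a hedged call-site sketch for get_iam_policy (names are placeholders and application default credentials are assumed):

from google.cloud.compute_v1 import InstanceTemplatesClient

client = InstanceTemplatesClient()

# The dict is coerced into a GetIamPolicyInstanceTemplateRequest; keyword
# arguments (project=..., resource=...) remain available as before.
policy = client.get_iam_policy(
    request={"project": "example-project", "resource": "example-template"}
)
print(policy.etag)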
@@ -345,30 +480,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/instanceTemplates", + "body": "instance_template_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertInstanceTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceTemplate.to_json( - request.instance_template_resource, + compute.InstanceTemplate(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/instanceTemplates".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertInstanceTemplateRequest.to_json( + compute.InsertInstanceTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertInstanceTemplateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -379,10 +545,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListInstanceTemplatesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceTemplateList: r"""Call the list method over HTTP. @@ -393,6 +561,9 @@ def list( InstanceTemplates.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -401,30 +572,54 @@ def list( A list of instance templates. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/instanceTemplates".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/instanceTemplates", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListInstanceTemplatesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInstanceTemplatesRequest.to_json( + compute.ListInstanceTemplatesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInstanceTemplatesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInstanceTemplatesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInstanceTemplatesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInstanceTemplatesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInstanceTemplatesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -436,10 +631,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyInstanceTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -450,6 +647,9 @@ def set_iam_policy( InstanceTemplates.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -498,28 +698,62 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/instanceTemplates/{resource}/setIamPolicy", + "body": "global_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyInstanceTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetPolicyRequest.to_json( - request.global_set_policy_request_resource, + compute.GlobalSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/instanceTemplates/{resource}/setIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyInstanceTemplateRequest.to_json( + compute.SetIamPolicyInstanceTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -530,10 +764,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsInstanceTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -544,6 +780,9 @@ def test_iam_permissions( InstanceTemplates.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
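Editor's note: with the HTTP verb now coming out of the transcoded rule, the handlers dispatch through getattr(self._session, method) instead of hard-coding self._session.get or .post, and the query dict goes through rest_helpers.flatten_query_params before requests encodes it. A rough sketch, assuming an ordinary requests.Session in place of the real AuthorizedSession and placeholder values throughout:

import requests
from google.api_core import rest_helpers

session = requests.Session()      # AuthorizedSession in the real transport
method = "post"                   # transcoded_request["method"]
host = "compute.googleapis.com"   # self._host
uri = "/compute/v1/projects/example-project/global/instanceTemplates"
query_params = {"requestId": "placeholder-request-id"}

# session.post(...) selected at runtime from the transcoding rule.
response = getattr(session, method)(
    "https://{host}{uri}".format(host=host, uri=uri),
    timeout=30.0,
    headers={"Content-Type": "application/json"},
    params=rest_helpers.flatten_query_params(query_params),
    data='{"name": "example-template"}',  # body produced by to_json
)
print(response.status_code)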
@@ -552,28 +791,64 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/instanceTemplates/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsInstanceTemplateRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/instanceTemplates/{resource}/testIamPermissions".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsInstanceTemplateRequest.to_json( + compute.TestIamPermissionsInstanceTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -586,5 +861,53 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[[compute.DeleteInstanceTemplateRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetInstanceTemplateRequest], compute.InstanceTemplate]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyInstanceTemplateRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert( + self, + ) -> Callable[[compute.InsertInstanceTemplateRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListInstanceTemplatesRequest], compute.InstanceTemplateList]: + return self._list + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyInstanceTemplateRequest], compute.Policy]: + return self._set_iam_policy + + @property + def 
test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsInstanceTemplateRequest], + compute.TestPermissionsResponse, + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("InstanceTemplatesRestTransport",) diff --git a/google/cloud/compute_v1/services/instances/client.py b/google/cloud/compute_v1/services/instances/client.py index 1a7054c7f..b0e2d5d55 100644 --- a/google/cloud/compute_v1/services/instances/client.py +++ b/google/cloud/compute_v1/services/instances/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.instances import pagers from google.cloud.compute_v1.types import compute from .transports.base import InstancesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,18 +335,19 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_access_config( self, - request: compute.AddAccessConfigInstanceRequest = None, + request: Union[compute.AddAccessConfigInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, network_interface: str = None, access_config_resource: compute.AccessConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -343,7 +355,7 @@ def add_access_config( interface. Args: - request (google.cloud.compute_v1.types.AddAccessConfigInstanceRequest): + request (Union[google.cloud.compute_v1.types.AddAccessConfigInstanceRequest, dict]): The request object. 
A request message for Instances.AddAccessConfig. See the method description for details. @@ -444,13 +456,13 @@ def add_access_config( def add_resource_policies( self, - request: compute.AddResourcePoliciesInstanceRequest = None, + request: Union[compute.AddResourcePoliciesInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_add_resource_policies_request_resource: compute.InstancesAddResourcePoliciesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -459,7 +471,7 @@ def add_resource_policies( to this instance for scheduling live migrations. Args: - request (google.cloud.compute_v1.types.AddResourcePoliciesInstanceRequest): + request (Union[google.cloud.compute_v1.types.AddResourcePoliciesInstanceRequest, dict]): The request object. A request message for Instances.AddResourcePolicies. See the method description for details. @@ -553,10 +565,10 @@ def add_resource_policies( def aggregated_list( self, - request: compute.AggregatedListInstancesRequest = None, + request: Union[compute.AggregatedListInstancesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -567,7 +579,7 @@ def aggregated_list( instances. Args: - request (google.cloud.compute_v1.types.AggregatedListInstancesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListInstancesRequest, dict]): The request object. A request message for Instances.AggregatedList. See the method description for details. @@ -628,13 +640,13 @@ def aggregated_list( def attach_disk( self, - request: compute.AttachDiskInstanceRequest = None, + request: Union[compute.AttachDiskInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, attached_disk_resource: compute.AttachedDisk = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -645,7 +657,7 @@ def attach_disk( persistent disk to your instance. Args: - request (google.cloud.compute_v1.types.AttachDiskInstanceRequest): + request (Union[google.cloud.compute_v1.types.AttachDiskInstanceRequest, dict]): The request object. A request message for Instances.AttachDisk. See the method description for details. @@ -735,12 +747,12 @@ def attach_disk( def bulk_insert( self, - request: compute.BulkInsertInstanceRequest = None, + request: Union[compute.BulkInsertInstanceRequest, dict] = None, *, project: str = None, zone: str = None, bulk_insert_instance_resource_resource: compute.BulkInsertInstanceResource = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -748,7 +760,7 @@ def bulk_insert( number of instances to create. Args: - request (google.cloud.compute_v1.types.BulkInsertInstanceRequest): + request (Union[google.cloud.compute_v1.types.BulkInsertInstanceRequest, dict]): The request object. A request message for Instances.BulkInsert. See the method description for details. 
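Editor's note: on the client side, the signatures changing from compute.XRequest to Union[compute.XRequest, dict] mean callers may pass either a typed request object or a plain dict with the same field names. A hedged usage sketch; project, zone, and instance names are placeholders, and constructing the client requires application default credentials.

from google.cloud import compute_v1

client = compute_v1.InstancesClient()

# Typed request object.
request = compute_v1.GetInstanceRequest(
    project="example-project", zone="us-central1-a", instance="example-instance"
)
instance = client.get(request=request)

# Equivalent call with a plain dict, accepted after this change.
instance = client.get(
    request={
        "project": "example-project",
        "zone": "us-central1-a",
        "instance": "example-instance",
    }
)
print(instance.name)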
@@ -835,12 +847,12 @@ def bulk_insert( def delete( self, - request: compute.DeleteInstanceRequest = None, + request: Union[compute.DeleteInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -848,7 +860,7 @@ def delete( information, see Deleting an instance. Args: - request (google.cloud.compute_v1.types.DeleteInstanceRequest): + request (Union[google.cloud.compute_v1.types.DeleteInstanceRequest, dict]): The request object. A request message for Instances.Delete. See the method description for details. @@ -933,14 +945,14 @@ def delete( def delete_access_config( self, - request: compute.DeleteAccessConfigInstanceRequest = None, + request: Union[compute.DeleteAccessConfigInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, access_config: str = None, network_interface: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -948,7 +960,7 @@ def delete_access_config( interface. Args: - request (google.cloud.compute_v1.types.DeleteAccessConfigInstanceRequest): + request (Union[google.cloud.compute_v1.types.DeleteAccessConfigInstanceRequest, dict]): The request object. A request message for Instances.DeleteAccessConfig. See the method description for details. @@ -1049,20 +1061,20 @@ def delete_access_config( def detach_disk( self, - request: compute.DetachDiskInstanceRequest = None, + request: Union[compute.DetachDiskInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, device_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Detaches a disk from an instance. Args: - request (google.cloud.compute_v1.types.DetachDiskInstanceRequest): + request (Union[google.cloud.compute_v1.types.DetachDiskInstanceRequest, dict]): The request object. A request message for Instances.DetachDisk. See the method description for details. @@ -1156,12 +1168,12 @@ def detach_disk( def get( self, - request: compute.GetInstanceRequest = None, + request: Union[compute.GetInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Instance: @@ -1169,7 +1181,7 @@ def get( of available instances by making a list() request. Args: - request (google.cloud.compute_v1.types.GetInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetInstanceRequest, dict]): The request object. A request message for Instances.Get. See the method description for details. 
project (str): @@ -1243,13 +1255,13 @@ def get( def get_effective_firewalls( self, - request: compute.GetEffectiveFirewallsInstanceRequest = None, + request: Union[compute.GetEffectiveFirewallsInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, network_interface: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstancesGetEffectiveFirewallsResponse: @@ -1257,7 +1269,7 @@ def get_effective_firewalls( of the instance. Args: - request (google.cloud.compute_v1.types.GetEffectiveFirewallsInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetEffectiveFirewallsInstanceRequest, dict]): The request object. A request message for Instances.GetEffectiveFirewalls. See the method description for details. @@ -1336,19 +1348,19 @@ def get_effective_firewalls( def get_guest_attributes( self, - request: compute.GetGuestAttributesInstanceRequest = None, + request: Union[compute.GetGuestAttributesInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.GuestAttributes: r"""Returns the specified guest attributes entry. Args: - request (google.cloud.compute_v1.types.GetGuestAttributesInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetGuestAttributesInstanceRequest, dict]): The request object. A request message for Instances.GetGuestAttributes. See the method description for details. @@ -1418,12 +1430,12 @@ def get_guest_attributes( def get_iam_policy( self, - request: compute.GetIamPolicyInstanceRequest = None, + request: Union[compute.GetIamPolicyInstanceRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1431,7 +1443,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyInstanceRequest, dict]): The request object. A request message for Instances.GetIamPolicy. See the method description for details. @@ -1540,19 +1552,19 @@ def get_iam_policy( def get_screenshot( self, - request: compute.GetScreenshotInstanceRequest = None, + request: Union[compute.GetScreenshotInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Screenshot: r"""Returns the screenshot from the specified instance. Args: - request (google.cloud.compute_v1.types.GetScreenshotInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetScreenshotInstanceRequest, dict]): The request object. A request message for Instances.GetScreenshot. See the method description for details. 
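Editor's note: get_iam_policy keeps its flattened project/zone/resource arguments alongside the new request-or-dict form and returns a compute.Policy whose bindings can be inspected directly. An illustrative call; the resource names are placeholders.

from google.cloud import compute_v1

client = compute_v1.InstancesClient()

policy = client.get_iam_policy(
    project="example-project", zone="us-central1-a", resource="example-instance"
)
for binding in policy.bindings:
    print(binding.role, list(binding.members))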
@@ -1622,12 +1634,12 @@ def get_screenshot( def get_serial_port_output( self, - request: compute.GetSerialPortOutputInstanceRequest = None, + request: Union[compute.GetSerialPortOutputInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SerialPortOutput: @@ -1635,7 +1647,7 @@ def get_serial_port_output( specified instance. Args: - request (google.cloud.compute_v1.types.GetSerialPortOutputInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetSerialPortOutputInstanceRequest, dict]): The request object. A request message for Instances.GetSerialPortOutput. See the method description for details. @@ -1705,19 +1717,19 @@ def get_serial_port_output( def get_shielded_instance_identity( self, - request: compute.GetShieldedInstanceIdentityInstanceRequest = None, + request: Union[compute.GetShieldedInstanceIdentityInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ShieldedInstanceIdentity: r"""Returns the Shielded Instance Identity of an instance Args: - request (google.cloud.compute_v1.types.GetShieldedInstanceIdentityInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetShieldedInstanceIdentityInstanceRequest, dict]): The request object. A request message for Instances.GetShieldedInstanceIdentity. See the method description for details. @@ -1789,12 +1801,12 @@ def get_shielded_instance_identity( def insert( self, - request: compute.InsertInstanceRequest = None, + request: Union[compute.InsertInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance_resource: compute.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1802,7 +1814,7 @@ def insert( using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertInstanceRequest): + request (Union[google.cloud.compute_v1.types.InsertInstanceRequest, dict]): The request object. A request message for Instances.Insert. See the method description for details. @@ -1885,11 +1897,11 @@ def insert( def list( self, - request: compute.ListInstancesRequest = None, + request: Union[compute.ListInstancesRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -1897,7 +1909,7 @@ def list( specified zone. Args: - request (google.cloud.compute_v1.types.ListInstancesRequest): + request (Union[google.cloud.compute_v1.types.ListInstancesRequest, dict]): The request object. A request message for Instances.List. See the method description for details. 
project (str): @@ -1967,12 +1979,12 @@ def list( def list_referrers( self, - request: compute.ListReferrersInstancesRequest = None, + request: Union[compute.ListReferrersInstancesRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListReferrersPager: @@ -1984,7 +1996,7 @@ def list_referrers( instances. Args: - request (google.cloud.compute_v1.types.ListReferrersInstancesRequest): + request (Union[google.cloud.compute_v1.types.ListReferrersInstancesRequest, dict]): The request object. A request message for Instances.ListReferrers. See the method description for details. @@ -2067,20 +2079,20 @@ def list_referrers( def remove_resource_policies( self, - request: compute.RemoveResourcePoliciesInstanceRequest = None, + request: Union[compute.RemoveResourcePoliciesInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_remove_resource_policies_request_resource: compute.InstancesRemoveResourcePoliciesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Removes resource policies from an instance. Args: - request (google.cloud.compute_v1.types.RemoveResourcePoliciesInstanceRequest): + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesInstanceRequest, dict]): The request object. A request message for Instances.RemoveResourcePolicies. See the method description for details. @@ -2179,12 +2191,12 @@ def remove_resource_policies( def reset( self, - request: compute.ResetInstanceRequest = None, + request: Union[compute.ResetInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2193,7 +2205,7 @@ def reset( information, see Resetting an instance. Args: - request (google.cloud.compute_v1.types.ResetInstanceRequest): + request (Union[google.cloud.compute_v1.types.ResetInstanceRequest, dict]): The request object. A request message for Instances.Reset. See the method description for details. project (str): @@ -2277,19 +2289,19 @@ def reset( def send_diagnostic_interrupt( self, - request: compute.SendDiagnosticInterruptInstanceRequest = None, + request: Union[compute.SendDiagnosticInterruptInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SendDiagnosticInterruptInstanceResponse: r"""Sends diagnostic interrupt to the instance. Args: - request (google.cloud.compute_v1.types.SendDiagnosticInterruptInstanceRequest): + request (Union[google.cloud.compute_v1.types.SendDiagnosticInterruptInstanceRequest, dict]): The request object. A request message for Instances.SendDiagnosticInterrupt. See the method description for details. 
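Editor's note: list() and list_referrers() still return pager objects; the pagers module later in this diff only tightens their annotations from Iterable to Iterator to match the generators they actually return. Iterating a pager fetches follow-up pages lazily, as in this placeholder sketch:

from google.cloud import compute_v1

client = compute_v1.InstancesClient()

# Iterating the pager transparently requests additional pages.
for instance in client.list(project="example-project", zone="us-central1-a"):
    print(instance.name)

# Pages can also be walked explicitly via the (now Iterator-typed) pages property.
pager = client.list(request={"project": "example-project", "zone": "us-central1-a"})
for page in pager.pages:
    print(len(page.items))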
@@ -2364,19 +2376,19 @@ def send_diagnostic_interrupt( def set_deletion_protection( self, - request: compute.SetDeletionProtectionInstanceRequest = None, + request: Union[compute.SetDeletionProtectionInstanceRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Sets deletion protection on the instance. Args: - request (google.cloud.compute_v1.types.SetDeletionProtectionInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetDeletionProtectionInstanceRequest, dict]): The request object. A request message for Instances.SetDeletionProtection. See the method description for details. @@ -2461,14 +2473,14 @@ def set_deletion_protection( def set_disk_auto_delete( self, - request: compute.SetDiskAutoDeleteInstanceRequest = None, + request: Union[compute.SetDiskAutoDeleteInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, auto_delete: bool = None, device_name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2476,7 +2488,7 @@ def set_disk_auto_delete( instance. Args: - request (google.cloud.compute_v1.types.SetDiskAutoDeleteInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetDiskAutoDeleteInstanceRequest, dict]): The request object. A request message for Instances.SetDiskAutoDelete. See the method description for details. @@ -2579,13 +2591,13 @@ def set_disk_auto_delete( def set_iam_policy( self, - request: compute.SetIamPolicyInstanceRequest = None, + request: Union[compute.SetIamPolicyInstanceRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, zone_set_policy_request_resource: compute.ZoneSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -2593,7 +2605,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyInstanceRequest, dict]): The request object. A request message for Instances.SetIamPolicy. See the method description for details. @@ -2713,13 +2725,13 @@ def set_iam_policy( def set_labels( self, - request: compute.SetLabelsInstanceRequest = None, + request: Union[compute.SetLabelsInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_set_labels_request_resource: compute.InstancesSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2727,7 +2739,7 @@ def set_labels( labels, read the Labeling Resources documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsInstanceRequest, dict]): The request object. A request message for Instances.SetLabels. See the method description for details. 
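Editor's note: because every method now takes retry: OptionalRetry and a timeout, callers can still override the defaults wired up in _prep_wrapped_messages. A sketch of an explicit override; the predicate and numbers are illustrative, not the library's defaults, and label_fingerprint handling is omitted for brevity.

from google.api_core import exceptions as core_exceptions
from google.api_core import retry as retries
from google.cloud import compute_v1

client = compute_v1.InstancesClient()

custom_retry = retries.Retry(
    predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
    initial=1.0,
    maximum=10.0,
    deadline=60.0,
)

operation = client.set_labels(
    project="example-project",
    zone="us-central1-a",
    instance="example-instance",
    instances_set_labels_request_resource=compute_v1.InstancesSetLabelsRequest(
        labels={"env": "test"}
    ),
    retry=custom_retry,
    timeout=30.0,
)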
@@ -2823,13 +2835,13 @@ def set_labels( def set_machine_resources( self, - request: compute.SetMachineResourcesInstanceRequest = None, + request: Union[compute.SetMachineResourcesInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_set_machine_resources_request_resource: compute.InstancesSetMachineResourcesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2837,7 +2849,7 @@ def set_machine_resources( stopped instance to the values specified in the request. Args: - request (google.cloud.compute_v1.types.SetMachineResourcesInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetMachineResourcesInstanceRequest, dict]): The request object. A request message for Instances.SetMachineResources. See the method description for details. @@ -2933,13 +2945,13 @@ def set_machine_resources( def set_machine_type( self, - request: compute.SetMachineTypeInstanceRequest = None, + request: Union[compute.SetMachineTypeInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_set_machine_type_request_resource: compute.InstancesSetMachineTypeRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2947,7 +2959,7 @@ def set_machine_type( the machine type specified in the request. Args: - request (google.cloud.compute_v1.types.SetMachineTypeInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetMachineTypeInstanceRequest, dict]): The request object. A request message for Instances.SetMachineType. See the method description for details. @@ -3043,13 +3055,13 @@ def set_machine_type( def set_metadata( self, - request: compute.SetMetadataInstanceRequest = None, + request: Union[compute.SetMetadataInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, metadata_resource: compute.Metadata = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3057,7 +3069,7 @@ def set_metadata( included in the request. Args: - request (google.cloud.compute_v1.types.SetMetadataInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetMetadataInstanceRequest, dict]): The request object. A request message for Instances.SetMetadata. See the method description for details. @@ -3149,13 +3161,13 @@ def set_metadata( def set_min_cpu_platform( self, - request: compute.SetMinCpuPlatformInstanceRequest = None, + request: Union[compute.SetMinCpuPlatformInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_set_min_cpu_platform_request_resource: compute.InstancesSetMinCpuPlatformRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3165,7 +3177,7 @@ def set_min_cpu_platform( Minimum CPU Platform. Args: - request (google.cloud.compute_v1.types.SetMinCpuPlatformInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetMinCpuPlatformInstanceRequest, dict]): The request object. 
A request message for Instances.SetMinCpuPlatform. See the method description for details. @@ -3261,13 +3273,13 @@ def set_min_cpu_platform( def set_scheduling( self, - request: compute.SetSchedulingInstanceRequest = None, + request: Union[compute.SetSchedulingInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, scheduling_resource: compute.Scheduling = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3277,7 +3289,7 @@ def set_scheduling( information on the possible instance states. Args: - request (google.cloud.compute_v1.types.SetSchedulingInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetSchedulingInstanceRequest, dict]): The request object. A request message for Instances.SetScheduling. See the method description for details. @@ -3367,13 +3379,13 @@ def set_scheduling( def set_service_account( self, - request: compute.SetServiceAccountInstanceRequest = None, + request: Union[compute.SetServiceAccountInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_set_service_account_request_resource: compute.InstancesSetServiceAccountRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3382,7 +3394,7 @@ def set_service_account( access scopes for an instance. Args: - request (google.cloud.compute_v1.types.SetServiceAccountInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetServiceAccountInstanceRequest, dict]): The request object. A request message for Instances.SetServiceAccount. See the method description for details. @@ -3478,13 +3490,15 @@ def set_service_account( def set_shielded_instance_integrity_policy( self, - request: compute.SetShieldedInstanceIntegrityPolicyInstanceRequest = None, + request: Union[ + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, dict + ] = None, *, project: str = None, zone: str = None, instance: str = None, shielded_instance_integrity_policy_resource: compute.ShieldedInstanceIntegrityPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3494,7 +3508,7 @@ def set_shielded_instance_integrity_policy( the JSON merge patch format and processing rules. Args: - request (google.cloud.compute_v1.types.SetShieldedInstanceIntegrityPolicyInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetShieldedInstanceIntegrityPolicyInstanceRequest, dict]): The request object. A request message for Instances.SetShieldedInstanceIntegrityPolicy. See the method description for details. @@ -3594,13 +3608,13 @@ def set_shielded_instance_integrity_policy( def set_tags( self, - request: compute.SetTagsInstanceRequest = None, + request: Union[compute.SetTagsInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, tags_resource: compute.Tags = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3608,7 +3622,7 @@ def set_tags( data included in the request. 
Args: - request (google.cloud.compute_v1.types.SetTagsInstanceRequest): + request (Union[google.cloud.compute_v1.types.SetTagsInstanceRequest, dict]): The request object. A request message for Instances.SetTags. See the method description for details. @@ -3700,19 +3714,19 @@ def set_tags( def simulate_maintenance_event( self, - request: compute.SimulateMaintenanceEventInstanceRequest = None, + request: Union[compute.SimulateMaintenanceEventInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Simulates a maintenance event on the instance. Args: - request (google.cloud.compute_v1.types.SimulateMaintenanceEventInstanceRequest): + request (Union[google.cloud.compute_v1.types.SimulateMaintenanceEventInstanceRequest, dict]): The request object. A request message for Instances.SimulateMaintenanceEvent. See the method description for details. @@ -3799,12 +3813,12 @@ def simulate_maintenance_event( def start( self, - request: compute.StartInstanceRequest = None, + request: Union[compute.StartInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3813,7 +3827,7 @@ def start( Restart an instance. Args: - request (google.cloud.compute_v1.types.StartInstanceRequest): + request (Union[google.cloud.compute_v1.types.StartInstanceRequest, dict]): The request object. A request message for Instances.Start. See the method description for details. project (str): @@ -3897,13 +3911,13 @@ def start( def start_with_encryption_key( self, - request: compute.StartWithEncryptionKeyInstanceRequest = None, + request: Union[compute.StartWithEncryptionKeyInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instances_start_with_encryption_key_request_resource: compute.InstancesStartWithEncryptionKeyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -3912,7 +3926,7 @@ def start_with_encryption_key( Restart an instance. Args: - request (google.cloud.compute_v1.types.StartWithEncryptionKeyInstanceRequest): + request (Union[google.cloud.compute_v1.types.StartWithEncryptionKeyInstanceRequest, dict]): The request object. A request message for Instances.StartWithEncryptionKey. See the method description for details. @@ -4015,12 +4029,12 @@ def start_with_encryption_key( def stop( self, - request: compute.StopInstanceRequest = None, + request: Union[compute.StopInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -4033,7 +4047,7 @@ def stop( more information, see Stopping an instance. Args: - request (google.cloud.compute_v1.types.StopInstanceRequest): + request (Union[google.cloud.compute_v1.types.StopInstanceRequest, dict]): The request object. A request message for Instances.Stop. See the method description for details. 
project (str): @@ -4117,13 +4131,13 @@ def stop( def test_iam_permissions( self, - request: compute.TestIamPermissionsInstanceRequest = None, + request: Union[compute.TestIamPermissionsInstanceRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -4131,7 +4145,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsInstanceRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsInstanceRequest, dict]): The request object. A request message for Instances.TestIamPermissions. See the method description for details. @@ -4212,13 +4226,13 @@ def test_iam_permissions( def update( self, - request: compute.UpdateInstanceRequest = None, + request: Union[compute.UpdateInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, instance_resource: compute.Instance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -4228,7 +4242,7 @@ def update( instance for a list of updatable instance properties. Args: - request (google.cloud.compute_v1.types.UpdateInstanceRequest): + request (Union[google.cloud.compute_v1.types.UpdateInstanceRequest, dict]): The request object. A request message for Instances.Update. See the method description for details. @@ -4320,14 +4334,14 @@ def update( def update_access_config( self, - request: compute.UpdateAccessConfigInstanceRequest = None, + request: Union[compute.UpdateAccessConfigInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, network_interface: str = None, access_config_resource: compute.AccessConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -4337,7 +4351,7 @@ def update_access_config( uses the JSON merge patch format and processing rules. Args: - request (google.cloud.compute_v1.types.UpdateAccessConfigInstanceRequest): + request (Union[google.cloud.compute_v1.types.UpdateAccessConfigInstanceRequest, dict]): The request object. A request message for Instances.UpdateAccessConfig. See the method description for details. @@ -4438,13 +4452,13 @@ def update_access_config( def update_display_device( self, - request: compute.UpdateDisplayDeviceInstanceRequest = None, + request: Union[compute.UpdateDisplayDeviceInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, display_device_resource: compute.DisplayDevice = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -4454,7 +4468,7 @@ def update_display_device( patch format and processing rules. Args: - request (google.cloud.compute_v1.types.UpdateDisplayDeviceInstanceRequest): + request (Union[google.cloud.compute_v1.types.UpdateDisplayDeviceInstanceRequest, dict]): The request object. A request message for Instances.UpdateDisplayDevice. See the method description for details. 
@@ -4546,14 +4560,14 @@ def update_display_device( def update_network_interface( self, - request: compute.UpdateNetworkInterfaceInstanceRequest = None, + request: Union[compute.UpdateNetworkInterfaceInstanceRequest, dict] = None, *, project: str = None, zone: str = None, instance: str = None, network_interface: str = None, network_interface_resource: compute.NetworkInterface = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -4566,7 +4580,7 @@ def update_network_interface( follows PATCH semantics. Args: - request (google.cloud.compute_v1.types.UpdateNetworkInterfaceInstanceRequest): + request (Union[google.cloud.compute_v1.types.UpdateNetworkInterfaceInstanceRequest, dict]): The request object. A request message for Instances.UpdateNetworkInterface. See the method description for details. @@ -4667,13 +4681,15 @@ def update_network_interface( def update_shielded_instance_config( self, - request: compute.UpdateShieldedInstanceConfigInstanceRequest = None, + request: Union[ + compute.UpdateShieldedInstanceConfigInstanceRequest, dict + ] = None, *, project: str = None, zone: str = None, instance: str = None, shielded_instance_config_resource: compute.ShieldedInstanceConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -4683,7 +4699,7 @@ def update_shielded_instance_config( patch format and processing rules. Args: - request (google.cloud.compute_v1.types.UpdateShieldedInstanceConfigInstanceRequest): + request (Union[google.cloud.compute_v1.types.UpdateShieldedInstanceConfigInstanceRequest, dict]): The request object. A request message for Instances.UpdateShieldedInstanceConfig. See the method description for details. @@ -4779,6 +4795,19 @@ def update_shielded_instance_config( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/instances/pagers.py b/google/cloud/compute_v1/services/instances/pagers.py index d262c74c8..9be68d10d 100644 --- a/google/cloud/compute_v1/services/instances/pagers.py +++ b/google/cloud/compute_v1/services/instances/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceAggregatedList]: + def pages(self) -> Iterator[compute.InstanceAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.InstancesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.InstancesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceList]: + def pages(self) -> Iterator[compute.InstanceList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Instance]: + def __iter__(self) -> Iterator[compute.Instance]: for page in self.pages: yield from page.items @@ -201,14 +201,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InstanceListReferrers]: + def pages(self) -> Iterator[compute.InstanceListReferrers]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Reference]: + def __iter__(self) -> Iterator[compute.Reference]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/instances/transports/base.py b/google/cloud/compute_v1/services/instances/transports/base.py index eb2efb4c6..709749bae 100644 --- a/google/cloud/compute_v1/services/instances/transports/base.py +++ b/google/cloud/compute_v1/services/instances/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InstancesTransport(abc.ABC): """Abstract transport class for Instances.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -329,6 +293,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def add_access_config( self, diff --git a/google/cloud/compute_v1/services/instances/transports/rest.py b/google/cloud/compute_v1/services/instances/transports/rest.py index ee426afa0..792948735 100644 --- a/google/cloud/compute_v1/services/instances/transports/rest.py +++ b/google/cloud/compute_v1/services/instances/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import InstancesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import InstancesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class InstancesRestTransport(InstancesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_access_config( + def _add_access_config( self, request: compute.AddAccessConfigInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add access config method over HTTP. 
@@ -112,6 +136,9 @@ def add_access_config( Instances.AddAccessConfig. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,34 +162,64 @@ def add_access_config( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig", + "body": "access_config_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("network_interface", "networkInterface"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AddAccessConfigInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.AccessConfig.to_json( - request.access_config_resource, + compute.AccessConfig(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddAccessConfigInstanceRequest.to_json( + compute.AddAccessConfigInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["networkInterface"] = request.network_interface - if compute.AddAccessConfigInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -173,10 +230,12 @@ def add_access_config( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def add_resource_policies( + def _add_resource_policies( self, request: compute.AddResourcePoliciesInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add resource policies method over HTTP. @@ -187,6 +246,9 @@ def add_resource_policies( Instances.AddResourcePolicies. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -210,33 +272,63 @@ def add_resource_policies( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies", + "body": "instances_add_resource_policies_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AddResourcePoliciesInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesAddResourcePoliciesRequest.to_json( - request.instances_add_resource_policies_request_resource, + compute.InstancesAddResourcePoliciesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddResourcePoliciesInstanceRequest.to_json( + compute.AddResourcePoliciesInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddResourcePoliciesInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -247,10 +339,12 @@ def add_resource_policies( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListInstancesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceAggregatedList: r"""Call the aggregated list method over HTTP. @@ -261,6 +355,9 @@ def aggregated_list( Instances.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -269,32 +366,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/instances".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListInstancesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListInstancesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListInstancesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListInstancesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListInstancesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListInstancesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/instances", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListInstancesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListInstancesRequest.to_json( + compute.AggregatedListInstancesRequest( + transcoded_request["query_params"] + ), + 
including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -306,10 +425,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def attach_disk( + def _attach_disk( self, request: compute.AttachDiskInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the attach disk method over HTTP. @@ -320,6 +441,9 @@ def attach_disk( Instances.AttachDisk. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -343,35 +467,61 @@ def attach_disk( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk", + "body": "attached_disk_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AttachDiskInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.AttachedDisk.to_json( - request.attached_disk_resource, + compute.AttachedDisk(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AttachDiskInstanceRequest.to_json( + compute.AttachDiskInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AttachDiskInstanceRequest.force_attach in request: - query_params["forceAttach"] = request.force_attach - if compute.AttachDiskInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in 
query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -382,10 +532,12 @@ def attach_disk( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def bulk_insert( + def _bulk_insert( self, request: compute.BulkInsertInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the bulk insert method over HTTP. @@ -396,6 +548,9 @@ def bulk_insert( Instances.BulkInsert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -419,30 +574,60 @@ def bulk_insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert", + "body": "bulk_insert_instance_resource_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.BulkInsertInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BulkInsertInstanceResource.to_json( - request.bulk_insert_instance_resource_resource, + compute.BulkInsertInstanceResource(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.BulkInsertInstanceRequest.to_json( + compute.BulkInsertInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.BulkInsertInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -453,10 +638,12 @@ def bulk_insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -467,6 +654,9 @@ def delete( Instances.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -490,25 +680,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInstanceRequest.to_json( + compute.DeleteInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -518,10 +737,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_access_config( + def _delete_access_config( self, request: compute.DeleteAccessConfigInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete access config method over HTTP. @@ -532,6 +753,9 @@ def delete_access_config( Instances.DeleteAccessConfig. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -555,27 +779,58 @@ def delete_access_config( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("access_config", "accessConfig"), + ("instance", "instance"), + ("network_interface", "networkInterface"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteAccessConfigInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteAccessConfigInstanceRequest.to_json( + compute.DeleteAccessConfigInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["accessConfig"] = request.access_config - query_params["networkInterface"] = request.network_interface - if compute.DeleteAccessConfigInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -585,10 +840,12 @@ def delete_access_config( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def detach_disk( + def _detach_disk( self, request: compute.DetachDiskInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the detach disk method over HTTP. @@ -599,6 +856,9 @@ def detach_disk( Instances.DetachDisk. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -622,26 +882,55 @@ def detach_disk( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("device_name", "deviceName"), + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DetachDiskInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DetachDiskInstanceRequest.to_json( + compute.DetachDiskInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["deviceName"] = request.device_name - if compute.DetachDiskInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -651,10 +940,12 @@ def detach_disk( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Instance: r"""Call the get method over HTTP. @@ -664,6 +955,9 @@ def get( The request object. A request message for Instances.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -677,23 +971,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetInstanceRequest.to_json( + compute.GetInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -703,10 +1028,12 @@ def get( # Return the response return compute.Instance.from_json(response.content, ignore_unknown_fields=True) - def get_effective_firewalls( + def _get_effective_firewalls( self, request: compute.GetEffectiveFirewallsInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstancesGetEffectiveFirewallsResponse: r"""Call the get effective firewalls method over HTTP. @@ -717,6 +1044,9 @@ def get_effective_firewalls( Instances.GetEffectiveFirewalls. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -725,24 +1055,57 @@ def get_effective_firewalls( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getEffectiveFirewalls".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getEffectiveFirewalls", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("network_interface", "networkInterface"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetEffectiveFirewallsInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetEffectiveFirewallsInstanceRequest.to_json( + compute.GetEffectiveFirewallsInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["networkInterface"] = request.network_interface + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -754,10 +1117,12 @@ def get_effective_firewalls( response.content, ignore_unknown_fields=True ) - def get_guest_attributes( + def _get_guest_attributes( self, request: compute.GetGuestAttributesInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.GuestAttributes: r"""Call the get guest attributes method over HTTP. @@ -768,6 +1133,9 @@ def get_guest_attributes( Instances.GetGuestAttributes. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -776,27 +1144,56 @@ def get_guest_attributes( A guest attributes entry. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getGuestAttributes".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getGuestAttributes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetGuestAttributesInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetGuestAttributesInstanceRequest.to_json( + compute.GetGuestAttributesInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetGuestAttributesInstanceRequest.query_path in request: - query_params["queryPath"] = request.query_path - if compute.GetGuestAttributesInstanceRequest.variable_key in request: - query_params["variableKey"] = request.variable_key + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -808,10 +1205,12 @@ def get_guest_attributes( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -822,6 +1221,9 @@ def get_iam_policy( Instances.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -870,30 +1272,54 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyInstanceRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetIamPolicyInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyInstanceRequest.to_json( + compute.GetIamPolicyInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -903,10 +1329,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def get_screenshot( + def _get_screenshot( self, request: compute.GetScreenshotInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Screenshot: r"""Call the get screenshot method over HTTP. @@ -917,6 +1345,9 @@ def get_screenshot( Instances.GetScreenshot. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -925,23 +1356,56 @@ def get_screenshot( An instance's screenshot. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/screenshot".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/screenshot", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetScreenshotInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetScreenshotInstanceRequest.to_json( + compute.GetScreenshotInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -953,10 +1417,12 @@ def get_screenshot( response.content, ignore_unknown_fields=True ) - def get_serial_port_output( + def _get_serial_port_output( self, request: compute.GetSerialPortOutputInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SerialPortOutput: r"""Call the get serial port output method over HTTP. @@ -967,6 +1433,9 @@ def get_serial_port_output( Instances.GetSerialPortOutput. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -975,27 +1444,56 @@ def get_serial_port_output( An instance serial console output. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/serialPort".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/serialPort", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetSerialPortOutputInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetSerialPortOutputInstanceRequest.to_json( + compute.GetSerialPortOutputInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetSerialPortOutputInstanceRequest.port in request: - query_params["port"] = request.port - if compute.GetSerialPortOutputInstanceRequest.start in request: - query_params["start"] = request.start + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1007,10 +1505,12 @@ def get_serial_port_output( response.content, ignore_unknown_fields=True ) - def get_shielded_instance_identity( + def _get_shielded_instance_identity( self, request: compute.GetShieldedInstanceIdentityInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ShieldedInstanceIdentity: r"""Call the get shielded instance @@ -1022,6 +1522,9 @@ def get_shielded_instance_identity( Instances.GetShieldedInstanceIdentity. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1030,23 +1533,58 @@ def get_shielded_instance_identity( A Shielded Instance Identity. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getShieldedInstanceIdentity".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getShieldedInstanceIdentity", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetShieldedInstanceIdentityInstanceRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetShieldedInstanceIdentityInstanceRequest.to_json( + compute.GetShieldedInstanceIdentityInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1058,10 +1596,12 @@ def get_shielded_instance_identity( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -1072,6 +1612,9 @@ def insert( Instances.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1095,32 +1638,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances", + "body": "instance_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Instance.to_json( - request.instance_resource, + compute.Instance(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertInstanceRequest.to_json( + compute.InsertInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id - if compute.InsertInstanceRequest.source_instance_template in request: - query_params["sourceInstanceTemplate"] = request.source_instance_template + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1131,10 +1702,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListInstancesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceList: r"""Call the list method over HTTP. @@ -1144,6 +1717,9 @@ def list( The request object. A request message for Instances.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1152,30 +1728,53 @@ def list( Contains a list of instances. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances".format( - host=self._host, project=request.project, zone=request.zone, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInstancesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInstancesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInstancesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInstancesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInstancesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListInstancesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInstancesRequest.to_json( + compute.ListInstancesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1187,10 +1786,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_referrers( + def _list_referrers( self, request: compute.ListReferrersInstancesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceListReferrers: r"""Call the list referrers method over HTTP. @@ -1201,6 +1802,9 @@ def list_referrers( Instances.ListReferrers. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1211,33 +1815,56 @@ def list_referrers( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/referrers".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListReferrersInstancesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListReferrersInstancesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListReferrersInstancesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListReferrersInstancesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListReferrersInstancesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/referrers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListReferrersInstancesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListReferrersInstancesRequest.to_json( + compute.ListReferrersInstancesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1249,10 +1876,12 @@ def list_referrers( response.content, ignore_unknown_fields=True ) - def remove_resource_policies( + def _remove_resource_policies( self, request: compute.RemoveResourcePoliciesInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove resource policies method over HTTP. @@ -1263,6 +1892,9 @@ def remove_resource_policies( Instances.RemoveResourcePolicies. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1286,33 +1918,63 @@ def remove_resource_policies( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies", + "body": "instances_remove_resource_policies_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.RemoveResourcePoliciesInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesRemoveResourcePoliciesRequest.to_json( - request.instances_remove_resource_policies_request_resource, + compute.InstancesRemoveResourcePoliciesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveResourcePoliciesInstanceRequest.to_json( + compute.RemoveResourcePoliciesInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveResourcePoliciesInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required 
fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1323,10 +1985,12 @@ def remove_resource_policies( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def reset( + def _reset( self, request: compute.ResetInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the reset method over HTTP. @@ -1337,6 +2001,9 @@ def reset( Instances.Reset. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1360,25 +2027,54 @@ def reset( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ResetInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ResetInstanceRequest.to_json( + compute.ResetInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ResetInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1388,10 +2084,12 @@ def reset( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def send_diagnostic_interrupt( + def _send_diagnostic_interrupt( self, request: compute.SendDiagnosticInterruptInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SendDiagnosticInterruptInstanceResponse: r"""Call the send diagnostic interrupt method over HTTP. @@ -1402,6 +2100,9 @@ def send_diagnostic_interrupt( Instances.SendDiagnosticInterrupt. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1413,23 +2114,56 @@ def send_diagnostic_interrupt( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/sendDiagnosticInterrupt".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/sendDiagnosticInterrupt", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SendDiagnosticInterruptInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SendDiagnosticInterruptInstanceRequest.to_json( + compute.SendDiagnosticInterruptInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1441,10 +2175,12 @@ def send_diagnostic_interrupt( response.content, ignore_unknown_fields=True ) - def set_deletion_protection( + def _set_deletion_protection( self, request: compute.SetDeletionProtectionInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set deletion protection method over HTTP. @@ -1455,6 +2191,9 @@ def set_deletion_protection( Instances.SetDeletionProtection. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1478,27 +2217,56 @@ def set_deletion_protection( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetDeletionProtectionInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetDeletionProtectionInstanceRequest.to_json( + compute.SetDeletionProtectionInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetDeletionProtectionInstanceRequest.deletion_protection in request: - query_params["deletionProtection"] = request.deletion_protection - if compute.SetDeletionProtectionInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1508,10 +2276,12 @@ def set_deletion_protection( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_disk_auto_delete( + def _set_disk_auto_delete( self, request: compute.SetDiskAutoDeleteInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set disk auto delete method over HTTP. @@ -1522,6 +2292,9 @@ def set_disk_auto_delete( Instances.SetDiskAutoDelete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1545,27 +2318,58 @@ def set_disk_auto_delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("auto_delete", "autoDelete"), + ("device_name", "deviceName"), + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetDiskAutoDeleteInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetDiskAutoDeleteInstanceRequest.to_json( + compute.SetDiskAutoDeleteInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["autoDelete"] = request.auto_delete - query_params["deviceName"] = request.device_name - if compute.SetDiskAutoDeleteInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1575,10 +2379,12 @@ def set_disk_auto_delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -1589,6 +2395,9 @@ def set_iam_policy( Instances.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1637,31 +2446,61 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy", + "body": "zone_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetIamPolicyInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ZoneSetPolicyRequest.to_json( - request.zone_set_policy_request_resource, + compute.ZoneSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyInstanceRequest.to_json( + compute.SetIamPolicyInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1672,10 +2511,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def set_labels( + def _set_labels( self, request: compute.SetLabelsInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -1686,6 +2527,9 @@ def set_labels( Instances.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1709,33 +2553,61 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setLabels", + "body": "instances_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetLabelsInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesSetLabelsRequest.to_json( - request.instances_set_labels_request_resource, + compute.InstancesSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setLabels".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsInstanceRequest.to_json( + compute.SetLabelsInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetLabelsInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1746,10 +2618,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_machine_resources( + def _set_machine_resources( self, request: compute.SetMachineResourcesInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set machine resources method over HTTP. @@ -1760,6 +2634,9 @@ def set_machine_resources( Instances.SetMachineResources. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1783,33 +2660,63 @@ def set_machine_resources( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineResources", + "body": "instances_set_machine_resources_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetMachineResourcesInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesSetMachineResourcesRequest.to_json( - request.instances_set_machine_resources_request_resource, + compute.InstancesSetMachineResourcesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineResources".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetMachineResourcesInstanceRequest.to_json( + compute.SetMachineResourcesInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetMachineResourcesInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1820,10 +2727,12 @@ def set_machine_resources( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_machine_type( + def _set_machine_type( self, request: compute.SetMachineTypeInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set machine type method over HTTP. @@ -1834,6 +2743,9 @@ def set_machine_type( Instances.SetMachineType. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1857,33 +2769,63 @@ def set_machine_type( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineType", + "body": "instances_set_machine_type_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetMachineTypeInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesSetMachineTypeRequest.to_json( - request.instances_set_machine_type_request_resource, + compute.InstancesSetMachineTypeRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineType".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetMachineTypeInstanceRequest.to_json( + compute.SetMachineTypeInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetMachineTypeInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1894,10 +2836,12 @@ def set_machine_type( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_metadata( + def _set_metadata( self, request: compute.SetMetadataInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set metadata method over HTTP. @@ -1908,6 +2852,9 @@ def set_metadata( Instances.SetMetadata. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1931,33 +2878,61 @@ def set_metadata( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMetadata", + "body": "metadata_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetMetadataInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Metadata.to_json( - request.metadata_resource, + compute.Metadata(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMetadata".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetMetadataInstanceRequest.to_json( + compute.SetMetadataInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetMetadataInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1968,10 +2943,12 @@ def set_metadata( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_min_cpu_platform( + def _set_min_cpu_platform( self, request: compute.SetMinCpuPlatformInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set min cpu platform method over HTTP. @@ -1982,6 +2959,9 @@ def set_min_cpu_platform( Instances.SetMinCpuPlatform. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2005,33 +2985,63 @@ def set_min_cpu_platform( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform", + "body": "instances_set_min_cpu_platform_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetMinCpuPlatformInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesSetMinCpuPlatformRequest.to_json( - request.instances_set_min_cpu_platform_request_resource, + compute.InstancesSetMinCpuPlatformRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetMinCpuPlatformInstanceRequest.to_json( + compute.SetMinCpuPlatformInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetMinCpuPlatformInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2042,10 +3052,12 @@ def set_min_cpu_platform( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_scheduling( + def _set_scheduling( self, request: compute.SetSchedulingInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set scheduling method over HTTP. @@ -2056,6 +3068,9 @@ def set_scheduling( Instances.SetScheduling. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2079,33 +3094,63 @@ def set_scheduling( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling", + "body": "scheduling_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetSchedulingInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Scheduling.to_json( - request.scheduling_resource, + compute.Scheduling(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetSchedulingInstanceRequest.to_json( + compute.SetSchedulingInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetSchedulingInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2116,10 +3161,12 @@ def set_scheduling( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_service_account( + def _set_service_account( self, request: compute.SetServiceAccountInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set service account method over HTTP. @@ -2130,6 +3177,9 @@ def set_service_account( Instances.SetServiceAccount. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2153,33 +3203,63 @@ def set_service_account( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount", + "body": "instances_set_service_account_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetServiceAccountInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesSetServiceAccountRequest.to_json( - request.instances_set_service_account_request_resource, + compute.InstancesSetServiceAccountRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetServiceAccountInstanceRequest.to_json( + compute.SetServiceAccountInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetServiceAccountInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2190,10 +3270,12 @@ def set_service_account( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_shielded_instance_integrity_policy( + def _set_shielded_instance_integrity_policy( self, request: compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set shielded instance @@ -2205,6 +3287,9 @@ def set_shielded_instance_integrity_policy( Instances.SetShieldedInstanceIntegrityPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
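
# --- Illustrative aside (not part of this PR's generated code) ---------------
# Every handler added above follows the same request-preparation pattern:
# transcode the request against its http_options, serialize the remaining
# fields into query parameters, then re-insert any required field whose
# default value was dropped by the serialization step. A minimal standalone
# sketch of that re-insertion step, using plain dicts instead of the generated
# proto types (the resource names and values below are assumptions for the
# example only):

import json

# Pretend output of path_template.transcode() for a setDiskAutoDelete call.
transcoded_request = {
    "method": "post",
    "uri": "/compute/v1/projects/my-project/zones/us-central1-a/instances/vm-1/setDiskAutoDelete",
    "query_params": {"auto_delete": False, "device_name": "boot-disk"},
}

required_fields = [
    # (snake_case_name, camel_case_name)
    ("auto_delete", "autoDelete"),
    ("device_name", "deviceName"),
]

# A serializer that omits default values (as to_json is called with
# including_default_value_fields=False) silently drops auto_delete=False,
# so only deviceName survives the round trip.
query_params = json.loads(json.dumps({"deviceName": "boot-disk"}))

# The loop used throughout the new transport restores the lost required field
# from the transcoded request.
orig_query_params = transcoded_request["query_params"]
for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"deviceName": "boot-disk", "autoDelete": False}
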
@@ -2228,36 +3313,65 @@ def set_shielded_instance_integrity_policy( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy", + "body": "shielded_instance_integrity_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ShieldedInstanceIntegrityPolicy.to_json( - request.shielded_instance_integrity_policy_resource, + compute.ShieldedInstanceIntegrityPolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.to_json( + compute.SetShieldedInstanceIntegrityPolicyInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.SetShieldedInstanceIntegrityPolicyInstanceRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2268,10 +3382,12 @@ def set_shielded_instance_integrity_policy( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_tags( + def _set_tags( self, request: compute.SetTagsInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set tags method over HTTP. @@ -2282,6 +3398,9 @@ def set_tags( Instances.SetTags. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2305,33 +3424,61 @@ def set_tags( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags", + "body": "tags_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetTagsInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Tags.to_json( - request.tags_resource, + compute.Tags(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetTagsInstanceRequest.to_json( + compute.SetTagsInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetTagsInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2342,10 +3489,12 @@ def set_tags( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def simulate_maintenance_event( + def _simulate_maintenance_event( self, request: compute.SimulateMaintenanceEventInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the simulate maintenance @@ -2357,6 +3506,9 @@ def simulate_maintenance_event( Instances.SimulateMaintenanceEvent. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
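
# --- Illustrative aside (not part of this PR's generated code) ---------------
# The handlers above no longer hard-code self._session.post/get/patch; the
# HTTP verb comes from the transcoded request and is looked up on the session
# at call time. A minimal sketch of that dispatch against a plain
# requests.Session (host, path, and query values below are placeholder
# assumptions; running it would actually issue the request and fail without
# credentials):

import requests

session = requests.Session()
host = "compute.googleapis.com"
transcoded_request = {
    "method": "post",
    "uri": "/compute/v1/projects/my-project/zones/us-central1-a/instances/vm-1/start",
}

# getattr(session, "post") returns the bound session.post method, so the call
# below is equivalent to session.post(url, ...), but the verb stays data-driven.
send = getattr(session, transcoded_request["method"])
response = send(
    "https://{host}{uri}".format(host=host, uri=transcoded_request["uri"]),
    timeout=30.0,
    headers={"Content-Type": "application/json"},
    params={"requestId": "example-request-id"},
)
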
@@ -2380,23 +3532,58 @@ def simulate_maintenance_event( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SimulateMaintenanceEventInstanceRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SimulateMaintenanceEventInstanceRequest.to_json( + compute.SimulateMaintenanceEventInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2406,10 +3593,12 @@ def simulate_maintenance_event( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def start( + def _start( self, request: compute.StartInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the start method over HTTP. @@ -2420,6 +3609,9 @@ def start( Instances.Start. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -2443,25 +3635,54 @@ def start( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.StartInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.StartInstanceRequest.to_json( + compute.StartInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.StartInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2471,10 +3692,12 @@ def start( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def start_with_encryption_key( + def _start_with_encryption_key( self, request: compute.StartWithEncryptionKeyInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the start with encryption key method over HTTP. @@ -2485,6 +3708,9 @@ def start_with_encryption_key( Instances.StartWithEncryptionKey. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -2508,33 +3734,63 @@ def start_with_encryption_key( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey", + "body": "instances_start_with_encryption_key_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.StartWithEncryptionKeyInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstancesStartWithEncryptionKeyRequest.to_json( - request.instances_start_with_encryption_key_request_resource, + compute.InstancesStartWithEncryptionKeyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.StartWithEncryptionKeyInstanceRequest.to_json( + compute.StartWithEncryptionKeyInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.StartWithEncryptionKeyInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2545,10 +3801,12 @@ def start_with_encryption_key( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def stop( + def _stop( self, request: compute.StopInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the stop method over HTTP. @@ -2558,6 +3816,9 @@ def stop( The request object. A request message for Instances.Stop. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2581,25 +3842,54 @@ def stop( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.StopInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.StopInstanceRequest.to_json( + compute.StopInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.StopInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -2609,10 +3899,12 @@ def stop( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -2623,6 +3915,9 @@ def test_iam_permissions( Instances.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -2631,31 +3926,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.TestIamPermissionsInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsInstanceRequest.to_json( + compute.TestIamPermissionsInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2668,10 +3995,12 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) - def update( + def _update( self, request: compute.UpdateInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -2682,6 +4011,9 @@ def update( Instances.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -2705,39 +4037,61 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}", + "body": "instance_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.UpdateInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Instance.to_json( - request.instance_resource, + compute.Instance(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateInstanceRequest.to_json( + compute.UpdateInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateInstanceRequest.minimal_action in request: - query_params["minimalAction"] = request.minimal_action - if compute.UpdateInstanceRequest.most_disruptive_allowed_action in request: - query_params[ - "mostDisruptiveAllowedAction" - ] = request.most_disruptive_allowed_action - if compute.UpdateInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2748,10 +4102,12 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update_access_config( + def _update_access_config( self, request: compute.UpdateAccessConfigInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update access config method over HTTP. @@ -2762,6 +4118,9 @@ def update_access_config( Instances.UpdateAccessConfig. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2785,34 +4144,64 @@ def update_access_config( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig", + "body": "access_config_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("network_interface", "networkInterface"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.UpdateAccessConfigInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.AccessConfig.to_json( - request.access_config_resource, + compute.AccessConfig(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateAccessConfigInstanceRequest.to_json( + compute.UpdateAccessConfigInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["networkInterface"] = request.network_interface - if compute.UpdateAccessConfigInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2823,10 +4212,12 @@ def update_access_config( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update_display_device( + def _update_display_device( self, request: compute.UpdateDisplayDeviceInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update display device method over HTTP. @@ -2837,6 +4228,9 @@ def update_display_device( Instances.UpdateDisplayDevice. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2860,33 +4254,63 @@ def update_display_device( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice", + "body": "display_device_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.UpdateDisplayDeviceInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.DisplayDevice.to_json( - request.display_device_resource, + compute.DisplayDevice(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateDisplayDeviceInstanceRequest.to_json( + compute.UpdateDisplayDeviceInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateDisplayDeviceInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2897,10 +4321,12 @@ def update_display_device( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update_network_interface( + def _update_network_interface( self, request: compute.UpdateNetworkInterfaceInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update network interface method over HTTP. @@ -2911,6 +4337,9 @@ def update_network_interface( Instances.UpdateNetworkInterface. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -2934,34 +4363,64 @@ def update_network_interface( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface", + "body": "network_interface_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("network_interface", "networkInterface"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.UpdateNetworkInterfaceInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworkInterface.to_json( - request.network_interface_resource, + compute.NetworkInterface(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateNetworkInterfaceInstanceRequest.to_json( + compute.UpdateNetworkInterfaceInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["networkInterface"] = request.network_interface - if compute.UpdateNetworkInterfaceInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2972,10 +4431,12 @@ def update_network_interface( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update_shielded_instance_config( + def _update_shielded_instance_config( self, request: compute.UpdateShieldedInstanceConfigInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update shielded instance @@ -2987,6 +4448,9 @@ def update_shielded_instance_config( Instances.UpdateShieldedInstanceConfig. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -3010,33 +4474,65 @@ def update_shielded_instance_config( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateShieldedInstanceConfig", + "body": "shielded_instance_config_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance", "instance"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.UpdateShieldedInstanceConfigInstanceRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ShieldedInstanceConfig.to_json( - request.shielded_instance_config_resource, + compute.ShieldedInstanceConfig(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateShieldedInstanceConfig".format( - host=self._host, - project=request.project, - zone=request.zone, - instance=request.instance, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateShieldedInstanceConfigInstanceRequest.to_json( + compute.UpdateShieldedInstanceConfigInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateShieldedInstanceConfigInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values 
in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3047,5 +4543,269 @@ def update_shielded_instance_config( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def add_access_config( + self, + ) -> Callable[[compute.AddAccessConfigInstanceRequest], compute.Operation]: + return self._add_access_config + + @property + def add_resource_policies( + self, + ) -> Callable[[compute.AddResourcePoliciesInstanceRequest], compute.Operation]: + return self._add_resource_policies + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListInstancesRequest], compute.InstanceAggregatedList + ]: + return self._aggregated_list + + @property + def attach_disk( + self, + ) -> Callable[[compute.AttachDiskInstanceRequest], compute.Operation]: + return self._attach_disk + + @property + def bulk_insert( + self, + ) -> Callable[[compute.BulkInsertInstanceRequest], compute.Operation]: + return self._bulk_insert + + @property + def delete(self) -> Callable[[compute.DeleteInstanceRequest], compute.Operation]: + return self._delete + + @property + def delete_access_config( + self, + ) -> Callable[[compute.DeleteAccessConfigInstanceRequest], compute.Operation]: + return self._delete_access_config + + @property + def detach_disk( + self, + ) -> Callable[[compute.DetachDiskInstanceRequest], compute.Operation]: + return self._detach_disk + + @property + def get(self) -> Callable[[compute.GetInstanceRequest], compute.Instance]: + return self._get + + @property + def get_effective_firewalls( + self, + ) -> Callable[ + [compute.GetEffectiveFirewallsInstanceRequest], + compute.InstancesGetEffectiveFirewallsResponse, + ]: + return self._get_effective_firewalls + + @property + def get_guest_attributes( + self, + ) -> Callable[[compute.GetGuestAttributesInstanceRequest], compute.GuestAttributes]: + return self._get_guest_attributes + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyInstanceRequest], compute.Policy]: + return self._get_iam_policy + + @property + def get_screenshot( + self, + ) -> Callable[[compute.GetScreenshotInstanceRequest], compute.Screenshot]: + return self._get_screenshot + + @property + def get_serial_port_output( + self, + ) -> Callable[ + [compute.GetSerialPortOutputInstanceRequest], compute.SerialPortOutput + ]: + return self._get_serial_port_output + + @property + def get_shielded_instance_identity( + self, + ) -> Callable[ + [compute.GetShieldedInstanceIdentityInstanceRequest], + compute.ShieldedInstanceIdentity, + ]: + return self._get_shielded_instance_identity + + @property + def insert(self) -> 
Callable[[compute.InsertInstanceRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListInstancesRequest], compute.InstanceList]: + return self._list + + @property + def list_referrers( + self, + ) -> Callable[ + [compute.ListReferrersInstancesRequest], compute.InstanceListReferrers + ]: + return self._list_referrers + + @property + def remove_resource_policies( + self, + ) -> Callable[[compute.RemoveResourcePoliciesInstanceRequest], compute.Operation]: + return self._remove_resource_policies + + @property + def reset(self) -> Callable[[compute.ResetInstanceRequest], compute.Operation]: + return self._reset + + @property + def send_diagnostic_interrupt( + self, + ) -> Callable[ + [compute.SendDiagnosticInterruptInstanceRequest], + compute.SendDiagnosticInterruptInstanceResponse, + ]: + return self._send_diagnostic_interrupt + + @property + def set_deletion_protection( + self, + ) -> Callable[[compute.SetDeletionProtectionInstanceRequest], compute.Operation]: + return self._set_deletion_protection + + @property + def set_disk_auto_delete( + self, + ) -> Callable[[compute.SetDiskAutoDeleteInstanceRequest], compute.Operation]: + return self._set_disk_auto_delete + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyInstanceRequest], compute.Policy]: + return self._set_iam_policy + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsInstanceRequest], compute.Operation]: + return self._set_labels + + @property + def set_machine_resources( + self, + ) -> Callable[[compute.SetMachineResourcesInstanceRequest], compute.Operation]: + return self._set_machine_resources + + @property + def set_machine_type( + self, + ) -> Callable[[compute.SetMachineTypeInstanceRequest], compute.Operation]: + return self._set_machine_type + + @property + def set_metadata( + self, + ) -> Callable[[compute.SetMetadataInstanceRequest], compute.Operation]: + return self._set_metadata + + @property + def set_min_cpu_platform( + self, + ) -> Callable[[compute.SetMinCpuPlatformInstanceRequest], compute.Operation]: + return self._set_min_cpu_platform + + @property + def set_scheduling( + self, + ) -> Callable[[compute.SetSchedulingInstanceRequest], compute.Operation]: + return self._set_scheduling + + @property + def set_service_account( + self, + ) -> Callable[[compute.SetServiceAccountInstanceRequest], compute.Operation]: + return self._set_service_account + + @property + def set_shielded_instance_integrity_policy( + self, + ) -> Callable[ + [compute.SetShieldedInstanceIntegrityPolicyInstanceRequest], compute.Operation + ]: + return self._set_shielded_instance_integrity_policy + + @property + def set_tags(self) -> Callable[[compute.SetTagsInstanceRequest], compute.Operation]: + return self._set_tags + + @property + def simulate_maintenance_event( + self, + ) -> Callable[[compute.SimulateMaintenanceEventInstanceRequest], compute.Operation]: + return self._simulate_maintenance_event + + @property + def start(self) -> Callable[[compute.StartInstanceRequest], compute.Operation]: + return self._start + + @property + def start_with_encryption_key( + self, + ) -> Callable[[compute.StartWithEncryptionKeyInstanceRequest], compute.Operation]: + return self._start_with_encryption_key + + @property + def stop(self) -> Callable[[compute.StopInstanceRequest], compute.Operation]: + return self._stop + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsInstanceRequest], 
compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + @property + def update(self) -> Callable[[compute.UpdateInstanceRequest], compute.Operation]: + return self._update + + @property + def update_access_config( + self, + ) -> Callable[[compute.UpdateAccessConfigInstanceRequest], compute.Operation]: + return self._update_access_config + + @property + def update_display_device( + self, + ) -> Callable[[compute.UpdateDisplayDeviceInstanceRequest], compute.Operation]: + return self._update_display_device + + @property + def update_network_interface( + self, + ) -> Callable[[compute.UpdateNetworkInterfaceInstanceRequest], compute.Operation]: + return self._update_network_interface + + @property + def update_shielded_instance_config( + self, + ) -> Callable[ + [compute.UpdateShieldedInstanceConfigInstanceRequest], compute.Operation + ]: + return self._update_shielded_instance_config + + def close(self): + self._session.close() + __all__ = ("InstancesRestTransport",) diff --git a/google/cloud/compute_v1/services/interconnect_attachments/client.py b/google/cloud/compute_v1/services/interconnect_attachments/client.py index 7374b5011..1fb34a362 100644 --- a/google/cloud/compute_v1/services/interconnect_attachments/client.py +++ b/google/cloud/compute_v1/services/interconnect_attachments/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.interconnect_attachments import pagers from google.cloud.compute_v1.types import compute from .transports.base import InterconnectAttachmentsTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
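The block that closes InstancesRestTransport above turns every public RPC method into a private, underscore-prefixed implementation and re-exposes it through a property that returns the callable, so names like `transport.start`, `transport.stop`, and `transport.update` keep working for existing call sites. A minimal, self-contained sketch of that pattern (illustrative only, not part of the patch; class and field names are made up):

    from typing import Callable

    class Transport:
        def _start(self, request: dict) -> str:
            # Private implementation; in the patch this is where the HTTP call lives.
            return "started " + request["instance"]

        @property
        def start(self) -> Callable[[dict], str]:
            # The public attribute resolves to the private callable, so
            # existing call sites such as transport.start(request) still work.
            return self._start

    transport = Transport()
    print(transport.start({"instance": "vm-1"}))  # -> "started vm-1"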
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,14 +339,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListInterconnectAttachmentsRequest = None, + request: Union[ + compute.AggregatedListInterconnectAttachmentsRequest, dict + ] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -343,7 +357,7 @@ def aggregated_list( attachments. Args: - request (google.cloud.compute_v1.types.AggregatedListInterconnectAttachmentsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListInterconnectAttachmentsRequest, dict]): The request object. A request message for InterconnectAttachments.AggregatedList. See the method description for details. @@ -406,19 +420,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteInterconnectAttachmentRequest = None, + request: Union[compute.DeleteInterconnectAttachmentRequest, dict] = None, *, project: str = None, region: str = None, interconnect_attachment: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified interconnect attachment. Args: - request (google.cloud.compute_v1.types.DeleteInterconnectAttachmentRequest): + request (Union[google.cloud.compute_v1.types.DeleteInterconnectAttachmentRequest, dict]): The request object. A request message for InterconnectAttachments.Delete. See the method description for details. @@ -501,19 +515,19 @@ def delete( def get( self, - request: compute.GetInterconnectAttachmentRequest = None, + request: Union[compute.GetInterconnectAttachmentRequest, dict] = None, *, project: str = None, region: str = None, interconnect_attachment: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectAttachment: r"""Returns the specified interconnect attachment. Args: - request (google.cloud.compute_v1.types.GetInterconnectAttachmentRequest): + request (Union[google.cloud.compute_v1.types.GetInterconnectAttachmentRequest, dict]): The request object. A request message for InterconnectAttachments.Get. See the method description for details. 
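As in the other clients touched by this patch, the `strtobool`-based parsing above is replaced with an explicit check, so only the literal strings "true" and "false" are accepted for `GOOGLE_API_USE_CLIENT_CERTIFICATE`. A rough sketch of the new behavior (hypothetical setup, not taken from the patch):

    import os
    from google.cloud import compute_v1

    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "1"  # previously truthy, now rejected

    try:
        compute_v1.InterconnectAttachmentsClient()
    except ValueError as exc:
        # Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be
        # either `true` or `false`
        print(exc)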
@@ -589,12 +603,12 @@ def get( def insert( self, - request: compute.InsertInterconnectAttachmentRequest = None, + request: Union[compute.InsertInterconnectAttachmentRequest, dict] = None, *, project: str = None, region: str = None, interconnect_attachment_resource: compute.InterconnectAttachment = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -602,7 +616,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertInterconnectAttachmentRequest): + request (Union[google.cloud.compute_v1.types.InsertInterconnectAttachmentRequest, dict]): The request object. A request message for InterconnectAttachments.Insert. See the method description for details. @@ -685,11 +699,11 @@ def insert( def list( self, - request: compute.ListInterconnectAttachmentsRequest = None, + request: Union[compute.ListInterconnectAttachmentsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -697,7 +711,7 @@ def list( contained within the specified region. Args: - request (google.cloud.compute_v1.types.ListInterconnectAttachmentsRequest): + request (Union[google.cloud.compute_v1.types.ListInterconnectAttachmentsRequest, dict]): The request object. A request message for InterconnectAttachments.List. See the method description for details. @@ -767,13 +781,13 @@ def list( def patch( self, - request: compute.PatchInterconnectAttachmentRequest = None, + request: Union[compute.PatchInterconnectAttachmentRequest, dict] = None, *, project: str = None, region: str = None, interconnect_attachment: str = None, interconnect_attachment_resource: compute.InterconnectAttachment = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -783,7 +797,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchInterconnectAttachmentRequest): + request (Union[google.cloud.compute_v1.types.PatchInterconnectAttachmentRequest, dict]): The request object. A request message for InterconnectAttachments.Patch. See the method description for details. @@ -877,6 +891,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/interconnect_attachments/pagers.py b/google/cloud/compute_v1/services/interconnect_attachments/pagers.py index bcb65e01f..bfddfabf8 100644 --- a/google/cloud/compute_v1/services/interconnect_attachments/pagers.py +++ b/google/cloud/compute_v1/services/interconnect_attachments/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,7 +74,7 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InterconnectAttachmentAggregatedList]: + def pages(self) -> Iterator[compute.InterconnectAttachmentAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -83,7 +83,7 @@ def pages(self) -> Iterable[compute.InterconnectAttachmentAggregatedList]: def __iter__( self, - ) -> Iterable[Tuple[str, compute.InterconnectAttachmentsScopedList]]: + ) -> Iterator[Tuple[str, compute.InterconnectAttachmentsScopedList]]: for page in self.pages: yield from page.items.items() @@ -141,14 +141,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InterconnectAttachmentList]: + def pages(self) -> Iterator[compute.InterconnectAttachmentList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InterconnectAttachment]: + def __iter__(self) -> Iterator[compute.InterconnectAttachment]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/interconnect_attachments/transports/base.py b/google/cloud/compute_v1/services/interconnect_attachments/transports/base.py index baae3e9ad..a26d8320c 100644 --- a/google/cloud/compute_v1/services/interconnect_attachments/transports/base.py +++ b/google/cloud/compute_v1/services/interconnect_attachments/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it 
is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InterconnectAttachmentsTransport(abc.ABC): """Abstract transport class for InterconnectAttachments.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -178,6 +142,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py b/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py index 192e5f837..bfe2abe15 100644 --- a/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py +++ b/google/cloud/compute_v1/services/interconnect_attachments/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + InterconnectAttachmentsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import InterconnectAttachmentsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class InterconnectAttachmentsRestTransport(InterconnectAttachmentsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListInterconnectAttachmentsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectAttachmentAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( InterconnectAttachments.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,38 +150,56 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/interconnectAttachments".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/interconnectAttachments", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListInterconnectAttachmentsRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListInterconnectAttachmentsRequest.to_json( + compute.AggregatedListInterconnectAttachmentsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListInterconnectAttachmentsRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.AggregatedListInterconnectAttachmentsRequest.include_all_scopes - in request - ): - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListInterconnectAttachmentsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListInterconnectAttachmentsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListInterconnectAttachmentsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListInterconnectAttachmentsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +211,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteInterconnectAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -177,6 +227,9 @@ def delete( InterconnectAttachments.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -200,25 +253,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}".format( - host=self._host, - project=request.project, - region=request.region, - interconnect_attachment=request.interconnect_attachment, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect_attachment", "interconnectAttachment"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteInterconnectAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInterconnectAttachmentRequest.to_json( + compute.DeleteInterconnectAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteInterconnectAttachmentRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -228,10 +312,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetInterconnectAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectAttachment: r"""Call the get method over HTTP. @@ -242,6 +328,9 @@ def get( InterconnectAttachments.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -258,23 +347,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}".format( - host=self._host, - project=request.project, - region=request.region, - interconnect_attachment=request.interconnect_attachment, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect_attachment", "interconnectAttachment"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetInterconnectAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetInterconnectAttachmentRequest.to_json( + compute.GetInterconnectAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
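# --- Editorial sketch (not part of the diff): the rewritten methods above no
# --- longer hard-code self._session.get()/.post()/...; the HTTP verb comes from
# --- the transcoded rule and the bound session method is looked up with
# --- getattr.  A stand-alone illustration with requests (hypothetical host; no
# --- request is actually sent here):
import requests

session = requests.Session()
method = "get"                                 # e.g. transcoded_request["method"]
uri = "/compute/v1/projects/demo/aggregated/interconnectAttachments"

send = getattr(session, method)                # session.get, bound and ready
print(send)                                    # <bound method Session.get of ...>
# Sending would then look like (commented out to avoid a real HTTP call):
# response = send(
#     "https://compute.example.com{uri}".format(uri=uri),
#     timeout=5.0,
#     headers={"Content-Type": "application/json"},
#     params=[("maxResults", "1")],
# )
# --- end of editorial sketch ---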
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -286,10 +408,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertInterconnectAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -300,6 +424,9 @@ def insert( InterconnectAttachments.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -323,32 +450,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/interconnectAttachments", + "body": "interconnect_attachment_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertInterconnectAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InterconnectAttachment.to_json( - request.interconnect_attachment_resource, + compute.InterconnectAttachment(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/interconnectAttachments".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertInterconnectAttachmentRequest.to_json( + compute.InsertInterconnectAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertInterconnectAttachmentRequest.request_id in request: - query_params["requestId"] = request.request_id - if compute.InsertInterconnectAttachmentRequest.validate_only in request: - query_params["validateOnly"] = request.validate_only + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
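# --- Editorial sketch (not part of the diff): the calls above pass the query
# --- dict through rest_helpers.flatten_query_params() before handing it to the
# --- session.  The point is that list-valued fields must become repeated
# --- key=value pairs on the URL rather than a single stringified list.  A rough
# --- stand-alone approximation (not the library routine):
from typing import Any, Dict, List, Tuple


def toy_flatten_query_params(params: Dict[str, Any]) -> List[Tuple[str, str]]:
    flat: List[Tuple[str, str]] = []
    for key, value in params.items():
        if isinstance(value, (list, tuple)):
            flat.extend((key, str(item)) for item in value)
        else:
            flat.append((key, str(value)))
    return flat


print(toy_flatten_query_params({"filter": "name=eq", "fields": ["id", "name"]}))
# -> [('filter', 'name=eq'), ('fields', 'id'), ('fields', 'name')]
# --- end of editorial sketch ---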
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -359,10 +516,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListInterconnectAttachmentsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectAttachmentList: r"""Call the list method over HTTP. @@ -373,6 +532,9 @@ def list( InterconnectAttachments.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -384,30 +546,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/interconnectAttachments".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/interconnectAttachments", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListInterconnectAttachmentsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInterconnectAttachmentsRequest.to_json( + compute.ListInterconnectAttachmentsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInterconnectAttachmentsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInterconnectAttachmentsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInterconnectAttachmentsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInterconnectAttachmentsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInterconnectAttachmentsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -419,10 +606,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchInterconnectAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -433,6 +622,9 @@ def patch( InterconnectAttachments.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -456,33 +648,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}", + "body": "interconnect_attachment_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect_attachment", "interconnectAttachment"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchInterconnectAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InterconnectAttachment.to_json( - request.interconnect_attachment_resource, + compute.InterconnectAttachment(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}".format( - host=self._host, - project=request.project, - region=request.region, - interconnect_attachment=request.interconnect_attachment, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchInterconnectAttachmentRequest.to_json( + compute.PatchInterconnectAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchInterconnectAttachmentRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -493,5 +715,51 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListInterconnectAttachmentsRequest], + compute.InterconnectAttachmentAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteInterconnectAttachmentRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[ + [compute.GetInterconnectAttachmentRequest], compute.InterconnectAttachment + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertInterconnectAttachmentRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListInterconnectAttachmentsRequest], compute.InterconnectAttachmentList + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchInterconnectAttachmentRequest], compute.Operation]: + return self._patch + + def close(self): + self._session.close() + __all__ = ("InterconnectAttachmentsRestTransport",) diff --git a/google/cloud/compute_v1/services/interconnect_locations/client.py b/google/cloud/compute_v1/services/interconnect_locations/client.py index ddfa9ff8d..348ec2d09 100644 --- a/google/cloud/compute_v1/services/interconnect_locations/client.py +++ b/google/cloud/compute_v1/services/interconnect_locations/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
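# --- Editorial sketch (not part of the diff): the public REST methods above are
# --- renamed with a leading underscore and re-exposed as properties that return
# --- the private callables, so callers can keep writing transport.delete(request)
# --- while the transport keeps one canonical implementation per RPC.  Minimal
# --- illustration with hypothetical names:
from typing import Callable


class ToyTransport:
    def _delete(self, request: dict) -> str:
        return "deleted {}".format(request["name"])

    @property
    def delete(self) -> Callable[[dict], str]:
        # Accessing .delete yields the bound private method, so
        # transport.delete(request) behaves exactly like a normal method call.
        return self._delete


transport = ToyTransport()
print(transport.delete({"name": "attachment-1"}))  # -> deleted attachment-1
# --- end of editorial sketch ---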
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.interconnect_locations import pagers from google.cloud.compute_v1.types import compute from .transports.base import InterconnectLocationsTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,15 +339,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def get( self, - request: compute.GetInterconnectLocationRequest = None, + request: Union[compute.GetInterconnectLocationRequest, dict] = None, *, project: str = None, interconnect_location: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectLocation: @@ -345,7 +357,7 @@ def get( locations by making a list() request. Args: - request (google.cloud.compute_v1.types.GetInterconnectLocationRequest): + request (Union[google.cloud.compute_v1.types.GetInterconnectLocationRequest, dict]): The request object. A request message for InterconnectLocations.Get. See the method description for details. @@ -413,10 +425,10 @@ def get( def list( self, - request: compute.ListInterconnectLocationsRequest = None, + request: Union[compute.ListInterconnectLocationsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -424,7 +436,7 @@ def list( available to the specified project. 
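# --- Editorial sketch (not part of the diff): the strtobool-based parsing of
# --- GOOGLE_API_USE_CLIENT_CERTIFICATE is replaced above by an explicit check
# --- for "true"/"false" (distutils is deprecated, and strtobool also accepted
# --- values such as "1", "yes", "on").  A stand-alone equivalent of the new
# --- behaviour, wrapped in a hypothetical helper:
import os


def use_client_cert_from_env(var: str = "GOOGLE_API_USE_CLIENT_CERTIFICATE") -> bool:
    value = os.getenv(var, "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `{}` must be either `true` or `false`".format(var)
        )
    return value == "true"


os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
print(use_client_cert_from_env())  # -> True
# --- end of editorial sketch ---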
Args: - request (google.cloud.compute_v1.types.ListInterconnectLocationsRequest): + request (Union[google.cloud.compute_v1.types.ListInterconnectLocationsRequest, dict]): The request object. A request message for InterconnectLocations.List. See the method description for details. @@ -485,6 +497,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/interconnect_locations/pagers.py b/google/cloud/compute_v1/services/interconnect_locations/pagers.py index 454443958..b17ff7b22 100644 --- a/google/cloud/compute_v1/services/interconnect_locations/pagers.py +++ b/google/cloud/compute_v1/services/interconnect_locations/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InterconnectLocationList]: + def pages(self) -> Iterator[compute.InterconnectLocationList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InterconnectLocation]: + def __iter__(self) -> Iterator[compute.InterconnectLocation]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/interconnect_locations/transports/base.py b/google/cloud/compute_v1/services/interconnect_locations/transports/base.py index 16f563fae..1412f4507 100644 --- a/google/cloud/compute_v1/services/interconnect_locations/transports/base.py +++ b/google/cloud/compute_v1/services/interconnect_locations/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - 
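# --- Editorial sketch (not part of the diff): the clients above gain
# --- __enter__/__exit__ so the underlying transport is closed when the client
# --- is used as a context manager.  A plausible usage under that change,
# --- assuming the google-cloud-compute client and default credentials are
# --- available (project name is hypothetical):
from google.cloud import compute_v1

with compute_v1.InterconnectLocationsClient() as client:
    for location in client.list(project="my-project"):
        print(location.name)
# On leaving the with-block, client.transport.close() is called, so the client
# should not share its transport with another client that is still in use.
# --- end of editorial sketch ---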
_GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InterconnectLocationsTransport(abc.ABC): """Abstract transport class for InterconnectLocations.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -167,6 +131,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def get( self, diff --git a/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py b/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py index 6834bbaaa..1bd5b7874 100644 --- a/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py +++ b/google/cloud/compute_v1/services/interconnect_locations/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + InterconnectLocationsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import InterconnectLocationsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class InterconnectLocationsRestTransport(InterconnectLocationsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def get( + def _get( self, request: compute.GetInterconnectLocationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectLocation: r"""Call the get method over HTTP. @@ -112,6 +139,9 @@ def get( InterconnectLocations.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -127,22 +157,55 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnectLocations/{interconnect_location}".format( - host=self._host, - project=request.project, - interconnect_location=request.interconnect_location, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/interconnectLocations/{interconnect_location}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect_location", "interconnectLocation"), + ("project", "project"), + ] + + request_kwargs = compute.GetInterconnectLocationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetInterconnectLocationRequest.to_json( + compute.GetInterconnectLocationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -154,10 +217,12 @@ def get( response.content, ignore_unknown_fields=True ) - def list( + def _list( self, request: compute.ListInterconnectLocationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectLocationList: r"""Call the list method over HTTP. @@ -168,6 +233,9 @@ def list( InterconnectLocations.List. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -179,30 +247,54 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnectLocations".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/interconnectLocations", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListInterconnectLocationsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInterconnectLocationsRequest.to_json( + compute.ListInterconnectLocationsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInterconnectLocationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInterconnectLocationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInterconnectLocationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInterconnectLocationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInterconnectLocationsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -214,5 +306,24 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def get( + self, + ) -> Callable[ + [compute.GetInterconnectLocationRequest], compute.InterconnectLocation + ]: + return self._get + + @property + def list( + self, + ) -> Callable[ + [compute.ListInterconnectLocationsRequest], compute.InterconnectLocationList + ]: + return self._list + + def close(self): + self._session.close() + __all__ = ("InterconnectLocationsRestTransport",) diff --git a/google/cloud/compute_v1/services/interconnects/client.py b/google/cloud/compute_v1/services/interconnects/client.py index 3f901999c..339cfc327 100644 --- a/google/cloud/compute_v1/services/interconnects/client.py +++ b/google/cloud/compute_v1/services/interconnects/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.interconnects import pagers from google.cloud.compute_v1.types import compute from .transports.base import InterconnectsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,22 +335,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteInterconnectRequest = None, + request: Union[compute.DeleteInterconnectRequest, dict] = None, *, project: str = None, interconnect: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified interconnect. 
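# --- Editorial sketch (not part of the diff): the OptionalRetry alias defined
# --- above sits in a try/except because gapic_v1.method._MethodDefault only
# --- exists in newer google-api-core releases; on an older release the
# --- attribute lookup raises AttributeError and the alias falls back to a
# --- plain `object` sentinel.  The same defensive pattern in isolation, with
# --- stand-in classes (all names here are hypothetical):
from typing import Union


class _FakeRetry:            # stands in for google.api_core.retry.Retry
    pass


class _FakeMethodModule:     # stands in for gapic_v1.method on an OLD release
    DEFAULT = object()       # note: no _MethodDefault class is defined


try:
    OptionalRetry = Union[_FakeRetry, _FakeMethodModule._MethodDefault]
except AttributeError:       # old release: fall back to the loose alias
    OptionalRetry = Union[_FakeRetry, object]

print(OptionalRetry)         # shows the fallback Union on the "old" stand-in
# --- end of editorial sketch ---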
Args: - request (google.cloud.compute_v1.types.DeleteInterconnectRequest): + request (Union[google.cloud.compute_v1.types.DeleteInterconnectRequest, dict]): The request object. A request message for Interconnects.Delete. See the method description for details. @@ -413,11 +425,11 @@ def delete( def get( self, - request: compute.GetInterconnectRequest = None, + request: Union[compute.GetInterconnectRequest, dict] = None, *, project: str = None, interconnect: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Interconnect: @@ -425,7 +437,7 @@ def get( available interconnects by making a list() request. Args: - request (google.cloud.compute_v1.types.GetInterconnectRequest): + request (Union[google.cloud.compute_v1.types.GetInterconnectRequest, dict]): The request object. A request message for Interconnects.Get. See the method description for details. @@ -490,11 +502,11 @@ def get( def get_diagnostics( self, - request: compute.GetDiagnosticsInterconnectRequest = None, + request: Union[compute.GetDiagnosticsInterconnectRequest, dict] = None, *, project: str = None, interconnect: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectsGetDiagnosticsResponse: @@ -502,7 +514,7 @@ def get_diagnostics( interconnect. Args: - request (google.cloud.compute_v1.types.GetDiagnosticsInterconnectRequest): + request (Union[google.cloud.compute_v1.types.GetDiagnosticsInterconnectRequest, dict]): The request object. A request message for Interconnects.GetDiagnostics. See the method description for details. @@ -565,11 +577,11 @@ def get_diagnostics( def insert( self, - request: compute.InsertInterconnectRequest = None, + request: Union[compute.InsertInterconnectRequest, dict] = None, *, project: str = None, interconnect_resource: compute.Interconnect = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -577,7 +589,7 @@ def insert( the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertInterconnectRequest): + request (Union[google.cloud.compute_v1.types.InsertInterconnectRequest, dict]): The request object. A request message for Interconnects.Insert. See the method description for details. @@ -651,10 +663,10 @@ def insert( def list( self, - request: compute.ListInterconnectsRequest = None, + request: Union[compute.ListInterconnectsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -662,7 +674,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListInterconnectsRequest): + request (Union[google.cloud.compute_v1.types.ListInterconnectsRequest, dict]): The request object. A request message for Interconnects.List. See the method description for details. 
@@ -725,12 +737,12 @@ def list( def patch( self, - request: compute.PatchInterconnectRequest = None, + request: Union[compute.PatchInterconnectRequest, dict] = None, *, project: str = None, interconnect: str = None, interconnect_resource: compute.Interconnect = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -740,7 +752,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchInterconnectRequest): + request (Union[google.cloud.compute_v1.types.PatchInterconnectRequest, dict]): The request object. A request message for Interconnects.Patch. See the method description for details. @@ -819,6 +831,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/interconnects/pagers.py b/google/cloud/compute_v1/services/interconnects/pagers.py index 5957ea431..d9f2f46ab 100644 --- a/google/cloud/compute_v1/services/interconnects/pagers.py +++ b/google/cloud/compute_v1/services/interconnects/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.InterconnectList]: + def pages(self) -> Iterator[compute.InterconnectList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Interconnect]: + def __iter__(self) -> Iterator[compute.Interconnect]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/interconnects/transports/base.py b/google/cloud/compute_v1/services/interconnects/transports/base.py index 3cafc0f82..c289aec85 100644 --- a/google/cloud/compute_v1/services/interconnects/transports/base.py +++ b/google/cloud/compute_v1/services/interconnects/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
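# --- Editorial sketch (not part of the diff): the pager changes above only
# --- touch the annotations (Iterable -> Iterator), because pages/__iter__ are
# --- generator functions.  The paging loop itself looks roughly like this toy
# --- pager over fake responses (all names hypothetical):
from typing import Iterator, List


class FakePage:
    def __init__(self, items: List[str], next_page_token: str = ""):
        self.items = items
        self.next_page_token = next_page_token


def fake_method(page_token: str = "") -> FakePage:
    # Pretend the server returns two pages.
    if page_token == "":
        return FakePage(["a", "b"], next_page_token="page-2")
    return FakePage(["c"])


class ToyListPager:
    def __init__(self):
        self._response = fake_method()

    @property
    def pages(self) -> Iterator[FakePage]:
        yield self._response
        while self._response.next_page_token:
            self._response = fake_method(self._response.next_page_token)
            yield self._response

    def __iter__(self) -> Iterator[str]:
        for page in self.pages:
            yield from page.items


print(list(ToyListPager()))  # -> ['a', 'b', 'c']
# --- end of editorial sketch ---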
gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class InterconnectsTransport(abc.ABC): """Abstract transport class for Interconnects.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -178,6 +142,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/interconnects/transports/rest.py b/google/cloud/compute_v1/services/interconnects/transports/rest.py index 312b6b479..0dd0f5403 100644 --- a/google/cloud/compute_v1/services/interconnects/transports/rest.py +++ b/google/cloud/compute_v1/services/interconnects/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + InterconnectsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import InterconnectsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class InterconnectsRestTransport(InterconnectsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteInterconnectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. 
@@ -112,6 +139,9 @@ def delete( Interconnects.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,22 +165,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnects/{interconnect}".format( - host=self._host, project=request.project, interconnect=request.interconnect, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/interconnects/{interconnect}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect", "interconnect"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteInterconnectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInterconnectRequest.to_json( + compute.DeleteInterconnectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteInterconnectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +221,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetInterconnectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Interconnect: r"""Call the get method over HTTP. @@ -174,6 +237,9 @@ def get( Interconnects.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -188,20 +254,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnects/{interconnect}".format( - host=self._host, project=request.project, interconnect=request.interconnect, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/interconnects/{interconnect}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect", "interconnect"), + ("project", "project"), + ] + + request_kwargs = compute.GetInterconnectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetInterconnectRequest.to_json( + compute.GetInterconnectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -213,10 +312,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_diagnostics( + def _get_diagnostics( self, request: compute.GetDiagnosticsInterconnectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectsGetDiagnosticsResponse: r"""Call the get diagnostics method over HTTP. @@ -227,6 +328,9 @@ def get_diagnostics( Interconnects.GetDiagnostics. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -237,20 +341,55 @@ def get_diagnostics( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnects/{interconnect}/getDiagnostics".format( - host=self._host, project=request.project, interconnect=request.interconnect, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/interconnects/{interconnect}/getDiagnostics", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect", "interconnect"), + ("project", "project"), + ] + + request_kwargs = compute.GetDiagnosticsInterconnectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetDiagnosticsInterconnectRequest.to_json( + compute.GetDiagnosticsInterconnectRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -262,10 +401,12 @@ def get_diagnostics( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertInterconnectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -276,6 +417,9 @@ def insert( Interconnects.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -299,30 +443,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/interconnects", + "body": "interconnect_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertInterconnectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Interconnect.to_json( - request.interconnect_resource, + compute.Interconnect(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnects".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertInterconnectRequest.to_json( + compute.InsertInterconnectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertInterconnectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -333,10 +506,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListInterconnectsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InterconnectList: r"""Call the list method over HTTP. @@ -347,6 +522,9 @@ def list( Interconnects.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -357,30 +535,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnects".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/interconnects", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListInterconnectsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInterconnectsRequest.to_json( + compute.ListInterconnectsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInterconnectsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInterconnectsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInterconnectsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInterconnectsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListInterconnectsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -392,10 +592,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchInterconnectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -406,6 +608,9 @@ def patch( Interconnects.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -429,30 +634,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/interconnects/{interconnect}", + "body": "interconnect_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("interconnect", "interconnect"), + ("project", "project"), + ] + + request_kwargs = compute.PatchInterconnectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Interconnect.to_json( - request.interconnect_resource, + compute.Interconnect(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/interconnects/{interconnect}".format( - host=self._host, project=request.project, interconnect=request.interconnect, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchInterconnectRequest.to_json( + compute.PatchInterconnectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchInterconnectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -463,5 +698,43 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteInterconnectRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetInterconnectRequest], compute.Interconnect]: + return self._get + + @property + def get_diagnostics( + self, + ) -> Callable[ + [compute.GetDiagnosticsInterconnectRequest], + compute.InterconnectsGetDiagnosticsResponse, + ]: + return self._get_diagnostics + + @property + def insert( + self, + ) -> Callable[[compute.InsertInterconnectRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListInterconnectsRequest], compute.InterconnectList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchInterconnectRequest], compute.Operation]: + return self._patch + + def close(self): + self._session.close() + 
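# --- Illustrative sketch (hypothetical helper, not the library's API) ------
# Every rewritten REST method above follows the same shape: transcode the
# request against `http_options`, JSON-encode what remains as query params,
# then backfill any *required* field whose default value was dropped by
# `to_json(..., including_default_value_fields=False)`. A minimal,
# self-contained sketch of that backfill step, using plain dicts in place of
# the compute.* messages and path_template.transcode:

import json


def _snake_to_camel(name: str) -> str:
    parts = name.split("_")
    return parts[0] + "".join(p.capitalize() for p in parts[1:] if p)


def build_query_params(transcoded_query_params, required_fields):
    # Stand-in for Request.to_json(..., including_default_value_fields=False):
    # default (falsy) values are dropped and field names are camelCased.
    query_params = {
        _snake_to_camel(name): value
        for name, value in transcoded_query_params.items()
        if value
    }
    # Backfill: a required field that held a default value was dropped above,
    # so copy it back from the transcoded request.
    for snake_case_name, camel_case_name in required_fields:
        if snake_case_name in transcoded_query_params:
            if camel_case_name not in query_params:
                query_params[camel_case_name] = transcoded_query_params[snake_case_name]
    return query_params


if __name__ == "__main__":
    print(json.dumps(build_query_params(
        {"project": "", "request_id": "abc-123"},  # "project" holds its default value
        [("project", "project")],                  # ...but is a required field
    )))
    # -> {"requestId": "abc-123", "project": ""}
# ---------------------------------------------------------------------------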
__all__ = ("InterconnectsRestTransport",) diff --git a/google/cloud/compute_v1/services/license_codes/client.py b/google/cloud/compute_v1/services/license_codes/client.py index f2039cf9a..29b543060 100644 --- a/google/cloud/compute_v1/services/license_codes/client.py +++ b/google/cloud/compute_v1/services/license_codes/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.types import compute from .transports.base import LicenseCodesTransport, DEFAULT_CLIENT_INFO from .transports.rest import LicenseCodesRestTransport @@ -260,8 +264,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -323,15 +334,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def get( self, - request: compute.GetLicenseCodeRequest = None, + request: Union[compute.GetLicenseCodeRequest, dict] = None, *, project: str = None, license_code: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.LicenseCode: @@ -341,7 +353,7 @@ def get( third-party partners who are creating Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.GetLicenseCodeRequest): + request (Union[google.cloud.compute_v1.types.GetLicenseCodeRequest, dict]): The request object. A request message for LicenseCodes.Get. See the method description for details. 
@@ -407,12 +419,12 @@ def get( def test_iam_permissions( self, - request: compute.TestIamPermissionsLicenseCodeRequest = None, + request: Union[compute.TestIamPermissionsLicenseCodeRequest, dict] = None, *, project: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -421,7 +433,7 @@ def test_iam_permissions( partners who are creating Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsLicenseCodeRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsLicenseCodeRequest, dict]): The request object. A request message for LicenseCodes.TestIamPermissions. See the method description for details. @@ -491,6 +503,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/license_codes/transports/base.py b/google/cloud/compute_v1/services/license_codes/transports/base.py index 1a5800749..bd93c1ea4 100644 --- a/google/cloud/compute_v1/services/license_codes/transports/base.py +++ b/google/cloud/compute_v1/services/license_codes/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class LicenseCodesTransport(abc.ABC): """Abstract transport class for LicenseCodes.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. 
self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -169,6 +133,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def get( self, diff --git a/google/cloud/compute_v1/services/license_codes/transports/rest.py b/google/cloud/compute_v1/services/license_codes/transports/rest.py index 86406d82c..d9734c72e 100644 --- a/google/cloud/compute_v1/services/license_codes/transports/rest.py +++ b/google/cloud/compute_v1/services/license_codes/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import LicenseCodesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import LicenseCodesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class LicenseCodesRestTransport(LicenseCodesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def get( + def _get( self, request: compute.GetLicenseCodeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.LicenseCode: r"""Call the get method over HTTP. @@ -112,6 +136,9 @@ def get( LicenseCodes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -125,20 +152,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenseCodes/{license_code}".format( - host=self._host, project=request.project, license_code=request.license_code, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/licenseCodes/{license_code}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("license_code", "licenseCode"), + ("project", "project"), + ] + + request_kwargs = compute.GetLicenseCodeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetLicenseCodeRequest.to_json( + compute.GetLicenseCodeRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -150,10 +210,12 @@ def get( response.content, ignore_unknown_fields=True ) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsLicenseCodeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -164,6 +226,9 @@ def test_iam_permissions( LicenseCodes.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -172,28 +237,62 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/licenseCodes/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsLicenseCodeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenseCodes/{resource}/testIamPermissions".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsLicenseCodeRequest.to_json( + compute.TestIamPermissionsLicenseCodeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -206,5 +305,20 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def get(self) -> Callable[[compute.GetLicenseCodeRequest], compute.LicenseCode]: + return self._get + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsLicenseCodeRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("LicenseCodesRestTransport",) diff --git a/google/cloud/compute_v1/services/licenses/client.py b/google/cloud/compute_v1/services/licenses/client.py index 624abc16e..6cfbf2b99 100644 --- a/google/cloud/compute_v1/services/licenses/client.py +++ b/google/cloud/compute_v1/services/licenses/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.licenses import pagers from google.cloud.compute_v1.types import compute from .transports.base import LicensesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,15 +335,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteLicenseRequest = None, + request: Union[compute.DeleteLicenseRequest, dict] = None, *, project: str = None, license_: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -341,7 +353,7 @@ def delete( Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.DeleteLicenseRequest): + request (Union[google.cloud.compute_v1.types.DeleteLicenseRequest, dict]): The request object. A request message for Licenses.Delete. See the method description for details. project (str): @@ -416,11 +428,11 @@ def delete( def get( self, - request: compute.GetLicenseRequest = None, + request: Union[compute.GetLicenseRequest, dict] = None, *, project: str = None, license_: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.License: @@ -429,7 +441,7 @@ def get( creating Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.GetLicenseRequest): + request (Union[google.cloud.compute_v1.types.GetLicenseRequest, dict]): The request object. A request message for Licenses.Get. See the method description for details. 
project (str): @@ -494,11 +506,11 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyLicenseRequest = None, + request: Union[compute.GetIamPolicyLicenseRequest, dict] = None, *, project: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -508,7 +520,7 @@ def get_iam_policy( Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.GetIamPolicyLicenseRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyLicenseRequest, dict]): The request object. A request message for Licenses.GetIamPolicy. See the method description for details. @@ -608,11 +620,11 @@ def get_iam_policy( def insert( self, - request: compute.InsertLicenseRequest = None, + request: Union[compute.InsertLicenseRequest, dict] = None, *, project: str = None, license_resource: compute.License = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -621,7 +633,7 @@ def insert( who are creating Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.InsertLicenseRequest): + request (Union[google.cloud.compute_v1.types.InsertLicenseRequest, dict]): The request object. A request message for Licenses.Insert. See the method description for details. project (str): @@ -694,10 +706,10 @@ def insert( def list( self, - request: compute.ListLicensesRequest = None, + request: Union[compute.ListLicensesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -712,7 +724,7 @@ def list( images. Args: - request (google.cloud.compute_v1.types.ListLicensesRequest): + request (Union[google.cloud.compute_v1.types.ListLicensesRequest, dict]): The request object. A request message for Licenses.List. See the method description for details. project (str): @@ -772,12 +784,12 @@ def list( def set_iam_policy( self, - request: compute.SetIamPolicyLicenseRequest = None, + request: Union[compute.SetIamPolicyLicenseRequest, dict] = None, *, project: str = None, resource: str = None, global_set_policy_request_resource: compute.GlobalSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -787,7 +799,7 @@ def set_iam_policy( Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.SetIamPolicyLicenseRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyLicenseRequest, dict]): The request object. A request message for Licenses.SetIamPolicy. See the method description for details. 
@@ -898,12 +910,12 @@ def set_iam_policy( def test_iam_permissions( self, - request: compute.TestIamPermissionsLicenseRequest = None, + request: Union[compute.TestIamPermissionsLicenseRequest, dict] = None, *, project: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -912,7 +924,7 @@ def test_iam_permissions( partners who are creating Cloud Marketplace images. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsLicenseRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsLicenseRequest, dict]): The request object. A request message for Licenses.TestIamPermissions. See the method description for details. @@ -982,6 +994,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/licenses/pagers.py b/google/cloud/compute_v1/services/licenses/pagers.py index e22067820..04f56661d 100644 --- a/google/cloud/compute_v1/services/licenses/pagers.py +++ b/google/cloud/compute_v1/services/licenses/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.LicensesListResponse]: + def pages(self) -> Iterator[compute.LicensesListResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.License]: + def __iter__(self) -> Iterator[compute.License]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/licenses/transports/base.py b/google/cloud/compute_v1/services/licenses/transports/base.py index e0c15a542..d6d7e9308 100644 --- a/google/cloud/compute_v1/services/licenses/transports/base.py +++ b/google/cloud/compute_v1/services/licenses/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 
+30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class LicensesTransport(abc.ABC): """Abstract transport class for Licenses.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -183,6 +147,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/licenses/transports/rest.py b/google/cloud/compute_v1/services/licenses/transports/rest.py index de84eb58b..f9d494c8b 100644 --- a/google/cloud/compute_v1/services/licenses/transports/rest.py +++ b/google/cloud/compute_v1/services/licenses/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import LicensesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import LicensesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class LicensesRestTransport(LicensesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteLicenseRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +136,9 @@ def delete( Licenses.Delete. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,23 +162,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenses/{license}".format( - host=self._host, project=request.project, license=request.license_, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/licenses/{license_}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("license_", "license"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteLicenseRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteLicenseRequest.to_json( + compute.DeleteLicenseRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["license"] = request.license_ - if compute.DeleteLicenseRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -161,10 +218,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetLicenseRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.License: r"""Call the get method over HTTP. @@ -174,6 +233,9 @@ def get( The request object. A request message for Licenses.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -187,21 +249,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenses/{license}".format( - host=self._host, project=request.project, license=request.license_, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/licenses/{license_}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("license_", "license"), + ("project", "project"), + ] + + request_kwargs = compute.GetLicenseRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetLicenseRequest.to_json( + compute.GetLicenseRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["license"] = request.license_ + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -211,10 +305,12 @@ def get( # Return the response return compute.License.from_json(response.content, ignore_unknown_fields=True) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyLicenseRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -225,6 +321,9 @@ def get_iam_policy( Licenses.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -273,27 +372,53 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenses/{resource}/getIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/licenses/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyLicenseRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyLicenseRequest.to_json( + compute.GetIamPolicyLicenseRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyLicenseRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -303,10 +428,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertLicenseRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -317,6 +444,9 @@ def insert( Licenses.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -340,30 +470,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/licenses", + "body": "license_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertLicenseRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.License.to_json( - request.license_resource, + compute.License(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenses".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertLicenseRequest.to_json( + compute.InsertLicenseRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertLicenseRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -374,10 +533,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListLicensesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.LicensesListResponse: r"""Call the list method over HTTP. @@ -387,6 +548,9 @@ def list( The request object. A request message for Licenses.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
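The request body is no longer read straight off `request.license_resource`; it is rebuilt from the transcoded request and serialized with the proto-plus `to_json` helper. A rough usage sketch (the `name` value is hypothetical):

    from google.cloud.compute_v1.types import compute

    license_msg = compute.License(name="my-license")
    body = compute.License.to_json(
        license_msg,
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
    # body is a JSON string, e.g. '{"name": "my-license"}', sent as the POST payload.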
@@ -395,30 +559,49 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenses".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/global/licenses",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListLicensesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListLicensesRequest.to_json( + compute.ListLicensesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListLicensesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListLicensesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListLicensesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListLicensesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListLicensesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -430,10 +613,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyLicenseRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -444,6 +629,9 @@ def set_iam_policy( Licenses.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
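Query parameters such as `filter`, `maxResults` and `pageToken` are no longer copied over field by field; the whole request is serialized and `rest_helpers.flatten_query_params` turns the resulting dict into key/value pairs that `requests` can encode. A rough sketch, assuming a simple flat dict:

    from google.api_core import rest_helpers

    query_params = {"maxResults": 50, "returnPartialSuccess": True}
    params = rest_helpers.flatten_query_params(query_params)
    # Roughly [("maxResults", 50), ("returnPartialSuccess", True)];
    # nested dicts would be flattened into dotted keys.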
@@ -492,28 +680,60 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/licenses/{resource}/setIamPolicy", + "body": "global_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyLicenseRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetPolicyRequest.to_json( - request.global_set_policy_request_resource, + compute.GlobalSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenses/{resource}/setIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyLicenseRequest.to_json( + compute.SetIamPolicyLicenseRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -524,10 +744,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsLicenseRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -538,6 +760,9 @@ def test_iam_permissions( Licenses.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
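Because the HTTP verb now comes out of the transcoding step, the handlers dispatch on it with `getattr(self._session, method)` instead of calling `self._session.post(...)` directly. A minimal sketch of the same idea on a plain `requests` session (the URL is hypothetical):

    import requests

    session = requests.Session()
    method = "post"  # taken from transcoded_request["method"]
    send = getattr(session, method)  # bound session.post
    # send("https://example.com/setIamPolicy", headers={}, params=[], data="{}")
    # would issue the request exactly as session.post(...) does.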
@@ -546,28 +771,62 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/licenses/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsLicenseRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/licenses/{resource}/testIamPermissions".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsLicenseRequest.to_json( + compute.TestIamPermissionsLicenseRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -580,5 +839,46 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def delete(self) -> Callable[[compute.DeleteLicenseRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetLicenseRequest], compute.License]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyLicenseRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert(self) -> Callable[[compute.InsertLicenseRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListLicensesRequest], compute.LicensesListResponse]: + return self._list + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyLicenseRequest], compute.Policy]: + return self._set_iam_policy + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsLicenseRequest], compute.TestPermissionsResponse + ]: + return 
self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("LicensesRestTransport",) diff --git a/google/cloud/compute_v1/services/machine_types/client.py b/google/cloud/compute_v1/services/machine_types/client.py index 80a6650e8..fb72d673c 100644 --- a/google/cloud/compute_v1/services/machine_types/client.py +++ b/google/cloud/compute_v1/services/machine_types/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.machine_types import pagers from google.cloud.compute_v1.types import compute from .transports.base import MachineTypesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListMachineTypesRequest = None, + request: Union[compute.AggregatedListMachineTypesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of machine types. Args: - request (google.cloud.compute_v1.types.AggregatedListMachineTypesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListMachineTypesRequest, dict]): The request object. A request message for MachineTypes.AggregatedList. See the method description for details. 
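With the signatures widened to `Union[..., dict]`, callers can hand the client a plain dict and let it be coerced into the request message. A hedged usage sketch, assuming application default credentials and a hypothetical project id:

    from google.cloud.compute_v1 import MachineTypesClient

    client = MachineTypesClient()
    # The aggregated-list pager yields (zone, MachineTypesScopedList) tuples.
    for zone, scoped_list in client.aggregated_list(request={"project": "my-project"}):
        for machine_type in scoped_list.machine_types:
            print(zone, machine_type.name)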
@@ -399,12 +411,12 @@ def aggregated_list( def get( self, - request: compute.GetMachineTypeRequest = None, + request: Union[compute.GetMachineTypeRequest, dict] = None, *, project: str = None, zone: str = None, machine_type: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.MachineType: @@ -412,7 +424,7 @@ def get( available machine types by making a list() request. Args: - request (google.cloud.compute_v1.types.GetMachineTypeRequest): + request (Union[google.cloud.compute_v1.types.GetMachineTypeRequest, dict]): The request object. A request message for MachineTypes.Get. See the method description for details. @@ -485,11 +497,11 @@ def get( def list( self, - request: compute.ListMachineTypesRequest = None, + request: Union[compute.ListMachineTypesRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -497,7 +509,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListMachineTypesRequest): + request (Union[google.cloud.compute_v1.types.ListMachineTypesRequest, dict]): The request object. A request message for MachineTypes.List. See the method description for details. @@ -566,6 +578,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/machine_types/pagers.py b/google/cloud/compute_v1/services/machine_types/pagers.py index 03022c088..67323d298 100644 --- a/google/cloud/compute_v1/services/machine_types/pagers.py +++ b/google/cloud/compute_v1/services/machine_types/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.MachineTypeAggregatedList]: + def pages(self) -> Iterator[compute.MachineTypeAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.MachineTypesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.MachineTypesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.MachineTypeList]: + def pages(self) -> Iterator[compute.MachineTypeList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.MachineType]: + def __iter__(self) -> Iterator[compute.MachineType]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/machine_types/transports/base.py b/google/cloud/compute_v1/services/machine_types/transports/base.py index af7b3f966..18bdb8e82 100644 --- a/google/cloud/compute_v1/services/machine_types/transports/base.py +++ b/google/cloud/compute_v1/services/machine_types/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - 
_GOOGLE_AUTH_VERSION = None - class MachineTypesTransport(abc.ABC): """Abstract transport class for MachineTypes.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -170,6 +134,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/machine_types/transports/rest.py b/google/cloud/compute_v1/services/machine_types/transports/rest.py index dd57321e7..7d3c49273 100644 --- a/google/cloud/compute_v1/services/machine_types/transports/rest.py +++ b/google/cloud/compute_v1/services/machine_types/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import MachineTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import MachineTypesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class MachineTypesRestTransport(MachineTypesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListMachineTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.MachineTypeAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( MachineTypes.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
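The REST transport now derives its `DEFAULT_CLIENT_INFO` from the base transport's and fills in the `requests` version, so the REST flavour is reported in telemetry headers. A small sketch of what that object carries (the `gapic_version` literal is hypothetical; it is normally read from the installed package):

    from requests import __version__ as requests_version
    from google.api_core import gapic_v1

    info = gapic_v1.client_info.ClientInfo(
        gapic_version="0.0.0",
        grpc_version=None,
        rest_version=requests_version,
    )
    # info.to_user_agent() folds these versions into the x-goog-api-client header value.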
@@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/machineTypes".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/machineTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListMachineTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListMachineTypesRequest.to_json( + compute.AggregatedListMachineTypesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListMachineTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListMachineTypesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListMachineTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListMachineTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListMachineTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListMachineTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetMachineTypeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.MachineType: r"""Call the get method over HTTP. @@ -171,6 +222,9 @@ def get( MachineTypes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -184,23 +238,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/machineTypes/{machine_type}".format( - host=self._host, - project=request.project, - zone=request.zone, - machine_type=request.machine_type, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/machineTypes/{machine_type}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("machine_type", "machineType"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetMachineTypeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetMachineTypeRequest.to_json( + compute.GetMachineTypeRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -212,10 +297,12 @@ def get( response.content, ignore_unknown_fields=True ) - def list( + def _list( self, request: compute.ListMachineTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.MachineTypeList: r"""Call the list method over HTTP. @@ -226,6 +313,9 @@ def list( MachineTypes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -234,30 +324,53 @@ def list( Contains a list of machine types. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/machineTypes".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/machineTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListMachineTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListMachineTypesRequest.to_json( + compute.ListMachineTypesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListMachineTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListMachineTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListMachineTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListMachineTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListMachineTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -269,5 +382,26 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListMachineTypesRequest], compute.MachineTypeAggregatedList + ]: + return self._aggregated_list + + @property + def get(self) -> Callable[[compute.GetMachineTypeRequest], compute.MachineType]: + return self._get + + @property + def list( + self, + ) -> Callable[[compute.ListMachineTypesRequest], compute.MachineTypeList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("MachineTypesRestTransport",) diff --git a/google/cloud/compute_v1/services/network_endpoint_groups/client.py b/google/cloud/compute_v1/services/network_endpoint_groups/client.py index ea6f60199..00b96f7fe 100644 --- a/google/cloud/compute_v1/services/network_endpoint_groups/client.py +++ b/google/cloud/compute_v1/services/network_endpoint_groups/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.network_endpoint_groups import pagers from google.cloud.compute_v1.types import compute from .transports.base import NetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,14 +339,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListNetworkEndpointGroupsRequest = None, + request: Union[compute.AggregatedListNetworkEndpointGroupsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -343,7 +355,7 @@ def aggregated_list( sorts them by zone. Args: - request (google.cloud.compute_v1.types.AggregatedListNetworkEndpointGroupsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListNetworkEndpointGroupsRequest, dict]): The request object. A request message for NetworkEndpointGroups.AggregatedList. See the method description for details. @@ -404,13 +416,15 @@ def aggregated_list( def attach_network_endpoints( self, - request: compute.AttachNetworkEndpointsNetworkEndpointGroupRequest = None, + request: Union[ + compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, dict + ] = None, *, project: str = None, zone: str = None, network_endpoint_group: str = None, network_endpoint_groups_attach_endpoints_request_resource: compute.NetworkEndpointGroupsAttachEndpointsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -418,7 +432,7 @@ def attach_network_endpoints( network endpoint group. Args: - request (google.cloud.compute_v1.types.AttachNetworkEndpointsNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.AttachNetworkEndpointsNetworkEndpointGroupRequest, dict]): The request object. A request message for NetworkEndpointGroups.AttachNetworkEndpoints. See the method description for details. @@ -524,12 +538,12 @@ def attach_network_endpoints( def delete( self, - request: compute.DeleteNetworkEndpointGroupRequest = None, + request: Union[compute.DeleteNetworkEndpointGroupRequest, dict] = None, *, project: str = None, zone: str = None, network_endpoint_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -540,7 +554,7 @@ def delete( services referencing it. Args: - request (google.cloud.compute_v1.types.DeleteNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.DeleteNetworkEndpointGroupRequest, dict]): The request object. A request message for NetworkEndpointGroups.Delete. See the method description for details. 
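The `distutils.util.strtobool` call is gone (distutils is deprecated upstream); the clients now accept only the literal strings `true` and `false` for this variable and fail loudly on anything else. The check in isolation:

    import os

    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    use_client_cert = value == "true"
    # Unlike strtobool, values such as "1", "yes" or "True" are now rejected.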
@@ -627,13 +641,15 @@ def delete( def detach_network_endpoints( self, - request: compute.DetachNetworkEndpointsNetworkEndpointGroupRequest = None, + request: Union[ + compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, dict + ] = None, *, project: str = None, zone: str = None, network_endpoint_group: str = None, network_endpoint_groups_detach_endpoints_request_resource: compute.NetworkEndpointGroupsDetachEndpointsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -641,7 +657,7 @@ def detach_network_endpoints( network endpoint group. Args: - request (google.cloud.compute_v1.types.DetachNetworkEndpointsNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.DetachNetworkEndpointsNetworkEndpointGroupRequest, dict]): The request object. A request message for NetworkEndpointGroups.DetachNetworkEndpoints. See the method description for details. @@ -747,12 +763,12 @@ def detach_network_endpoints( def get( self, - request: compute.GetNetworkEndpointGroupRequest = None, + request: Union[compute.GetNetworkEndpointGroupRequest, dict] = None, *, project: str = None, zone: str = None, network_endpoint_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroup: @@ -761,7 +777,7 @@ def get( list() request. Args: - request (google.cloud.compute_v1.types.GetNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.GetNetworkEndpointGroupRequest, dict]): The request object. A request message for NetworkEndpointGroups.Get. See the method description for details. @@ -843,12 +859,12 @@ def get( def insert( self, - request: compute.InsertNetworkEndpointGroupRequest = None, + request: Union[compute.InsertNetworkEndpointGroupRequest, dict] = None, *, project: str = None, zone: str = None, network_endpoint_group_resource: compute.NetworkEndpointGroup = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -857,7 +873,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.InsertNetworkEndpointGroupRequest, dict]): The request object. A request message for NetworkEndpointGroups.Insert. See the method description for details. @@ -943,11 +959,11 @@ def insert( def list( self, - request: compute.ListNetworkEndpointGroupsRequest = None, + request: Union[compute.ListNetworkEndpointGroupsRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -955,7 +971,7 @@ def list( are located in the specified project and zone. Args: - request (google.cloud.compute_v1.types.ListNetworkEndpointGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListNetworkEndpointGroupsRequest, dict]): The request object. A request message for NetworkEndpointGroups.List. See the method description for details. 
@@ -1026,13 +1042,15 @@ def list( def list_network_endpoints( self, - request: compute.ListNetworkEndpointsNetworkEndpointGroupsRequest = None, + request: Union[ + compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, dict + ] = None, *, project: str = None, zone: str = None, network_endpoint_group: str = None, network_endpoint_groups_list_endpoints_request_resource: compute.NetworkEndpointGroupsListEndpointsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListNetworkEndpointsPager: @@ -1040,7 +1058,7 @@ def list_network_endpoints( endpoint group. Args: - request (google.cloud.compute_v1.types.ListNetworkEndpointsNetworkEndpointGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListNetworkEndpointsNetworkEndpointGroupsRequest, dict]): The request object. A request message for NetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. @@ -1140,13 +1158,15 @@ def list_network_endpoints( def test_iam_permissions( self, - request: compute.TestIamPermissionsNetworkEndpointGroupRequest = None, + request: Union[ + compute.TestIamPermissionsNetworkEndpointGroupRequest, dict + ] = None, *, project: str = None, zone: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1154,7 +1174,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNetworkEndpointGroupRequest, dict]): The request object. A request message for NetworkEndpointGroups.TestIamPermissions. See the method description for details. @@ -1235,6 +1255,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/network_endpoint_groups/pagers.py b/google/cloud/compute_v1/services/network_endpoint_groups/pagers.py index d9191ba28..cb20a8c54 100644 --- a/google/cloud/compute_v1/services/network_endpoint_groups/pagers.py +++ b/google/cloud/compute_v1/services/network_endpoint_groups/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NetworkEndpointGroupAggregatedList]: + def pages(self) -> Iterator[compute.NetworkEndpointGroupAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.NetworkEndpointGroupsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.NetworkEndpointGroupsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NetworkEndpointGroupList]: + def pages(self) -> Iterator[compute.NetworkEndpointGroupList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NetworkEndpointGroup]: + def __iter__(self) -> Iterator[compute.NetworkEndpointGroup]: for page in self.pages: yield from page.items @@ -203,14 +203,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NetworkEndpointGroupsListNetworkEndpoints]: + def pages(self) -> Iterator[compute.NetworkEndpointGroupsListNetworkEndpoints]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NetworkEndpointWithHealthStatus]: + def __iter__(self) -> Iterator[compute.NetworkEndpointWithHealthStatus]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/network_endpoint_groups/transports/base.py b/google/cloud/compute_v1/services/network_endpoint_groups/transports/base.py index 30d8036b0..4bf5d33eb 100644 --- a/google/cloud/compute_v1/services/network_endpoint_groups/transports/base.py +++ b/google/cloud/compute_v1/services/network_endpoint_groups/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from 
google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class NetworkEndpointGroupsTransport(abc.ABC): """Abstract transport class for NetworkEndpointGroups.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -195,6 +159,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py b/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py index d7ecb2650..eb2a67a07 100644 --- a/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py +++ b/google/cloud/compute_v1/services/network_endpoint_groups/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + NetworkEndpointGroupsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import NetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class NetworkEndpointGroupsRestTransport(NetworkEndpointGroupsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListNetworkEndpointGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroupAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( NetworkEndpointGroups.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,38 +150,56 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/networkEndpointGroups".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListNetworkEndpointGroupsRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.AggregatedListNetworkEndpointGroupsRequest.include_all_scopes - in request - ): - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListNetworkEndpointGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListNetworkEndpointGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListNetworkEndpointGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListNetworkEndpointGroupsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/networkEndpointGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListNetworkEndpointGroupsRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListNetworkEndpointGroupsRequest.to_json( + compute.AggregatedListNetworkEndpointGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +211,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def attach_network_endpoints( + def _attach_network_endpoints( self, request: compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the attach network endpoints method over HTTP. @@ -177,6 +227,9 @@ def attach_network_endpoints( NetworkEndpointGroups.AttachNetworkEndpoints. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -200,36 +253,67 @@ def attach_network_endpoints( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints", + "body": "network_endpoint_groups_attach_endpoints_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworkEndpointGroupsAttachEndpointsRequest.to_json( - request.network_endpoint_groups_attach_endpoints_request_resource, + compute.NetworkEndpointGroupsAttachEndpointsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints".format( - host=self._host, - project=request.project, - zone=request.zone, - network_endpoint_group=request.network_endpoint_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.to_json( + compute.AttachNetworkEndpointsNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - 
compute.AttachNetworkEndpointsNetworkEndpointGroupRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -240,10 +324,12 @@ def attach_network_endpoints( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -254,6 +340,9 @@ def delete( NetworkEndpointGroups.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -277,25 +366,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}".format( - host=self._host, - project=request.project, - zone=request.zone, - network_endpoint_group=request.network_endpoint_group, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteNetworkEndpointGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteNetworkEndpointGroupRequest.to_json( + compute.DeleteNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteNetworkEndpointGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -305,10 +425,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def detach_network_endpoints( + def _detach_network_endpoints( self, request: compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the detach network endpoints method over HTTP. @@ -319,6 +441,9 @@ def detach_network_endpoints( NetworkEndpointGroups.DetachNetworkEndpoints. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -342,36 +467,67 @@ def detach_network_endpoints( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints", + "body": "network_endpoint_groups_detach_endpoints_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworkEndpointGroupsDetachEndpointsRequest.to_json( - request.network_endpoint_groups_detach_endpoints_request_resource, + compute.NetworkEndpointGroupsDetachEndpointsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints".format( - host=self._host, - project=request.project, - zone=request.zone, - network_endpoint_group=request.network_endpoint_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.to_json( + compute.DetachNetworkEndpointsNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - 
query_params = {} - if ( - compute.DetachNetworkEndpointsNetworkEndpointGroupRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -382,10 +538,12 @@ def detach_network_endpoints( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroup: r"""Call the get method over HTTP. @@ -396,6 +554,9 @@ def get( NetworkEndpointGroups.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -415,23 +576,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}".format( - host=self._host, - project=request.project, - zone=request.zone, - network_endpoint_group=request.network_endpoint_group, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetNetworkEndpointGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetNetworkEndpointGroupRequest.to_json( + compute.GetNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -443,10 +637,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -457,6 +653,9 @@ def insert( NetworkEndpointGroups.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -480,30 +679,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups", + "body": "network_endpoint_group_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertNetworkEndpointGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworkEndpointGroup.to_json( - request.network_endpoint_group_resource, + compute.NetworkEndpointGroup(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertNetworkEndpointGroupRequest.to_json( + compute.InsertNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertNetworkEndpointGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -514,10 +745,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListNetworkEndpointGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroupList: r"""Call the list method over HTTP. @@ -528,6 +761,9 @@ def list( NetworkEndpointGroups.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -536,30 +772,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups".format( - host=self._host, project=request.project, zone=request.zone, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListNetworkEndpointGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListNetworkEndpointGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListNetworkEndpointGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListNetworkEndpointGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListNetworkEndpointGroupsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListNetworkEndpointGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNetworkEndpointGroupsRequest.to_json( + compute.ListNetworkEndpointGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -571,10 +832,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_network_endpoints( + def _list_network_endpoints( self, request: compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroupsListNetworkEndpoints: r"""Call the list network endpoints method over HTTP. @@ -585,6 +848,9 @@ def list_network_endpoints( NetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -593,50 +859,67 @@ def list_network_endpoints( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints", + "body": "network_endpoint_groups_list_endpoints_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworkEndpointGroupsListEndpointsRequest.to_json( - request.network_endpoint_groups_list_endpoints_request_resource, + compute.NetworkEndpointGroupsListEndpointsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.to_json( + compute.ListNetworkEndpointsNetworkEndpointGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints".format( - host=self._host, - project=request.project, - zone=request.zone, - network_endpoint_group=request.network_endpoint_group, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if 
compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.max_results - in request - ): - query_params["maxResults"] = request.max_results - if compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if ( - compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.page_token - in request - ): - query_params["pageToken"] = request.page_token - if ( - compute.ListNetworkEndpointsNetworkEndpointGroupsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -649,10 +932,12 @@ def list_network_endpoints( response.content, ignore_unknown_fields=True ) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -663,6 +948,9 @@ def test_iam_permissions( NetworkEndpointGroups.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -671,31 +959,65 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.TestIamPermissionsNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsNetworkEndpointGroupRequest.to_json( + compute.TestIamPermissionsNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -708,5 +1030,79 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupAggregatedList, + ]: + return self._aggregated_list + + @property + def attach_network_endpoints( + self, + ) -> Callable[ + [compute.AttachNetworkEndpointsNetworkEndpointGroupRequest], compute.Operation + ]: + return self._attach_network_endpoints + + @property + def delete( + self, + ) -> Callable[[compute.DeleteNetworkEndpointGroupRequest], compute.Operation]: + return self._delete + + @property + def detach_network_endpoints( + self, + ) -> Callable[ + [compute.DetachNetworkEndpointsNetworkEndpointGroupRequest], compute.Operation + ]: + return self._detach_network_endpoints + + @property + def get( + self, + ) -> Callable[ + [compute.GetNetworkEndpointGroupRequest], compute.NetworkEndpointGroup + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertNetworkEndpointGroupRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListNetworkEndpointGroupsRequest], compute.NetworkEndpointGroupList + ]: + return self._list + + @property + def list_network_endpoints( + self, + ) -> Callable[ + [compute.ListNetworkEndpointsNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupsListNetworkEndpoints, + ]: + return self._list_network_endpoints + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsNetworkEndpointGroupRequest], + compute.TestPermissionsResponse, + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("NetworkEndpointGroupsRestTransport",) diff --git a/google/cloud/compute_v1/services/networks/client.py b/google/cloud/compute_v1/services/networks/client.py index 1b338d41f..8a5ffbb93 100644 --- a/google/cloud/compute_v1/services/networks/client.py +++ b/google/cloud/compute_v1/services/networks/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.networks import pagers from google.cloud.compute_v1.types import compute from .transports.base import NetworksTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,23 +335,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_peering( self, - request: compute.AddPeeringNetworkRequest = None, + request: Union[compute.AddPeeringNetworkRequest, dict] = None, *, project: str = None, network: str = None, networks_add_peering_request_resource: compute.NetworksAddPeeringRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Adds a peering to the specified network. Args: - request (google.cloud.compute_v1.types.AddPeeringNetworkRequest): + request (Union[google.cloud.compute_v1.types.AddPeeringNetworkRequest, dict]): The request object. A request message for Networks.AddPeering. See the method description for details. @@ -427,18 +439,18 @@ def add_peering( def delete( self, - request: compute.DeleteNetworkRequest = None, + request: Union[compute.DeleteNetworkRequest, dict] = None, *, project: str = None, network: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified network. Args: - request (google.cloud.compute_v1.types.DeleteNetworkRequest): + request (Union[google.cloud.compute_v1.types.DeleteNetworkRequest, dict]): The request object. 
A request message for Networks.Delete. See the method description for details. project (str): @@ -511,11 +523,11 @@ def delete( def get( self, - request: compute.GetNetworkRequest = None, + request: Union[compute.GetNetworkRequest, dict] = None, *, project: str = None, network: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Network: @@ -523,7 +535,7 @@ def get( available networks by making a list() request. Args: - request (google.cloud.compute_v1.types.GetNetworkRequest): + request (Union[google.cloud.compute_v1.types.GetNetworkRequest, dict]): The request object. A request message for Networks.Get. See the method description for details. project (str): @@ -586,18 +598,18 @@ def get( def get_effective_firewalls( self, - request: compute.GetEffectiveFirewallsNetworkRequest = None, + request: Union[compute.GetEffectiveFirewallsNetworkRequest, dict] = None, *, project: str = None, network: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworksGetEffectiveFirewallsResponse: r"""Returns the effective firewalls on a given network. Args: - request (google.cloud.compute_v1.types.GetEffectiveFirewallsNetworkRequest): + request (Union[google.cloud.compute_v1.types.GetEffectiveFirewallsNetworkRequest, dict]): The request object. A request message for Networks.GetEffectiveFirewalls. See the method description for details. @@ -656,11 +668,11 @@ def get_effective_firewalls( def insert( self, - request: compute.InsertNetworkRequest = None, + request: Union[compute.InsertNetworkRequest, dict] = None, *, project: str = None, network_resource: compute.Network = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -668,7 +680,7 @@ def insert( data included in the request. Args: - request (google.cloud.compute_v1.types.InsertNetworkRequest): + request (Union[google.cloud.compute_v1.types.InsertNetworkRequest, dict]): The request object. A request message for Networks.Insert. See the method description for details. project (str): @@ -741,10 +753,10 @@ def insert( def list( self, - request: compute.ListNetworksRequest = None, + request: Union[compute.ListNetworksRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -752,7 +764,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListNetworksRequest): + request (Union[google.cloud.compute_v1.types.ListNetworksRequest, dict]): The request object. A request message for Networks.List. See the method description for details. project (str): @@ -813,11 +825,11 @@ def list( def list_peering_routes( self, - request: compute.ListPeeringRoutesNetworksRequest = None, + request: Union[compute.ListPeeringRoutesNetworksRequest, dict] = None, *, project: str = None, network: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPeeringRoutesPager: @@ -825,7 +837,7 @@ def list_peering_routes( connection. 
Args: - request (google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest): + request (Union[google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest, dict]): The request object. A request message for Networks.ListPeeringRoutes. See the method description for details. @@ -893,12 +905,12 @@ def list_peering_routes( def patch( self, - request: compute.PatchNetworkRequest = None, + request: Union[compute.PatchNetworkRequest, dict] = None, *, project: str = None, network: str = None, network_resource: compute.Network = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -907,7 +919,7 @@ def patch( modified: routingConfig.routingMode. Args: - request (google.cloud.compute_v1.types.PatchNetworkRequest): + request (Union[google.cloud.compute_v1.types.PatchNetworkRequest, dict]): The request object. A request message for Networks.Patch. See the method description for details. project (str): @@ -987,19 +999,19 @@ def patch( def remove_peering( self, - request: compute.RemovePeeringNetworkRequest = None, + request: Union[compute.RemovePeeringNetworkRequest, dict] = None, *, project: str = None, network: str = None, networks_remove_peering_request_resource: compute.NetworksRemovePeeringRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Removes a peering from the specified network. Args: - request (google.cloud.compute_v1.types.RemovePeeringNetworkRequest): + request (Union[google.cloud.compute_v1.types.RemovePeeringNetworkRequest, dict]): The request object. A request message for Networks.RemovePeering. See the method description for details. @@ -1086,11 +1098,11 @@ def remove_peering( def switch_to_custom_mode( self, - request: compute.SwitchToCustomModeNetworkRequest = None, + request: Union[compute.SwitchToCustomModeNetworkRequest, dict] = None, *, project: str = None, network: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1098,7 +1110,7 @@ def switch_to_custom_mode( custom subnet mode. Args: - request (google.cloud.compute_v1.types.SwitchToCustomModeNetworkRequest): + request (Union[google.cloud.compute_v1.types.SwitchToCustomModeNetworkRequest, dict]): The request object. A request message for Networks.SwitchToCustomMode. See the method description for details. @@ -1172,12 +1184,12 @@ def switch_to_custom_mode( def update_peering( self, - request: compute.UpdatePeeringNetworkRequest = None, + request: Union[compute.UpdatePeeringNetworkRequest, dict] = None, *, project: str = None, network: str = None, networks_update_peering_request_resource: compute.NetworksUpdatePeeringRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1187,7 +1199,7 @@ def update_peering( NetworkPeering.import_custom_routes field. Args: - request (google.cloud.compute_v1.types.UpdatePeeringNetworkRequest): + request (Union[google.cloud.compute_v1.types.UpdatePeeringNetworkRequest, dict]): The request object. A request message for Networks.UpdatePeering. See the method description for details. 
@@ -1272,6 +1284,19 @@ def update_peering( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/networks/pagers.py b/google/cloud/compute_v1/services/networks/pagers.py index 647a3325e..ccbf210d7 100644 --- a/google/cloud/compute_v1/services/networks/pagers.py +++ b/google/cloud/compute_v1/services/networks/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NetworkList]: + def pages(self) -> Iterator[compute.NetworkList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Network]: + def __iter__(self) -> Iterator[compute.Network]: for page in self.pages: yield from page.items @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ExchangedPeeringRoutesList]: + def pages(self) -> Iterator[compute.ExchangedPeeringRoutesList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ExchangedPeeringRoute]: + def __iter__(self) -> Iterator[compute.ExchangedPeeringRoute]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/networks/transports/base.py b/google/cloud/compute_v1/services/networks/transports/base.py index e10e936be..d9e0787bc 100644 --- a/google/cloud/compute_v1/services/networks/transports/base.py +++ b/google/cloud/compute_v1/services/networks/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() 
-try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class NetworksTransport(abc.ABC): """Abstract transport class for Networks.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -197,6 +161,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def add_peering( self, diff --git a/google/cloud/compute_v1/services/networks/transports/rest.py b/google/cloud/compute_v1/services/networks/transports/rest.py index dbd6dad47..10fc51dda 100644 --- a/google/cloud/compute_v1/services/networks/transports/rest.py +++ b/google/cloud/compute_v1/services/networks/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import NetworksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import NetworksTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class NetworksRestTransport(NetworksTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_peering( + def _add_peering( self, request: compute.AddPeeringNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add peering method over HTTP. 
@@ -112,6 +136,9 @@ def add_peering( Networks.AddPeering. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,30 +162,60 @@ def add_peering( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networks/{network}/addPeering", + "body": "networks_add_peering_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.AddPeeringNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworksAddPeeringRequest.to_json( - request.networks_add_peering_request_resource, + compute.NetworksAddPeeringRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}/addPeering".format( - host=self._host, project=request.project, network=request.network, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddPeeringNetworkRequest.to_json( + compute.AddPeeringNetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddPeeringNetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -169,10 +226,12 @@ def add_peering( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -183,6 +242,9 @@ def delete( Networks.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -206,22 +268,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}".format( - host=self._host, project=request.project, network=request.network, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/networks/{network}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.DeleteNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteNetworkRequest.to_json( + compute.DeleteNetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteNetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -231,10 +324,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Network: r"""Call the get method over HTTP. @@ -244,6 +339,9 @@ def get( The request object. A request message for Networks.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
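A minimal sketch of the transcoding flow the rewritten handlers above rely on: the declared http_options are handed to google.api_core.path_template.transcode, which expands the URI template, picks the HTTP verb and body field, and leaves the remaining request fields as query parameters. The field values below are made up for illustration only.

    from google.api_core import path_template

    http_options = [
        {
            "method": "post",
            "uri": "/compute/v1/projects/{project}/global/networks/{network}/addPeering",
            "body": "networks_add_peering_request_resource",
        },
    ]

    # Hypothetical request fields; request_id is not part of the URI template,
    # so it is left over and lands in query_params.
    transcoded = path_template.transcode(
        http_options,
        project="my-project",
        network="my-network",
        networks_add_peering_request_resource={"name": "peer-1"},
        request_id="42",
    )

    # transcoded["method"]       == "post"
    # transcoded["uri"]          == "/compute/v1/projects/my-project/global/networks/my-network/addPeering"
    # transcoded["body"]         == {"name": "peer-1"}
    # transcoded["query_params"] == {"request_id": "42"}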
@@ -257,20 +355,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}".format( - host=self._host, project=request.project, network=request.network, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/networks/{network}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.GetNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetNetworkRequest.to_json( + compute.GetNetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -280,10 +411,12 @@ def get( # Return the response return compute.Network.from_json(response.content, ignore_unknown_fields=True) - def get_effective_firewalls( + def _get_effective_firewalls( self, request: compute.GetEffectiveFirewallsNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworksGetEffectiveFirewallsResponse: r"""Call the get effective firewalls method over HTTP. @@ -294,6 +427,9 @@ def get_effective_firewalls( Networks.GetEffectiveFirewalls. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -302,20 +438,55 @@ def get_effective_firewalls( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}/getEffectiveFirewalls".format( - host=self._host, project=request.project, network=request.network, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/networks/{network}/getEffectiveFirewalls", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.GetEffectiveFirewallsNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetEffectiveFirewallsNetworkRequest.to_json( + compute.GetEffectiveFirewallsNetworkRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -327,10 +498,12 @@ def get_effective_firewalls( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -341,6 +514,9 @@ def insert( Networks.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -364,30 +540,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networks", + "body": "network_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Network.to_json( - request.network_resource, + compute.Network(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertNetworkRequest.to_json( + compute.InsertNetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertNetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -398,10 +603,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListNetworksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkList: r"""Call the list method over HTTP. @@ -411,6 +618,9 @@ def list( The request object. A request message for Networks.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -419,30 +629,49 @@ def list( Contains a list of networks. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListNetworksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListNetworksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListNetworksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListNetworksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListNetworksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/global/networks",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListNetworksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNetworksRequest.to_json( + compute.ListNetworksRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -454,10 +683,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_peering_routes( + def _list_peering_routes( self, request: compute.ListPeeringRoutesNetworksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ExchangedPeeringRoutesList: r"""Call the list peering routes method over HTTP. @@ -468,6 +699,9 @@ def list_peering_routes( Networks.ListPeeringRoutes. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -476,36 +710,55 @@ def list_peering_routes( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}/listPeeringRoutes".format( - host=self._host, project=request.project, network=request.network, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListPeeringRoutesNetworksRequest.direction in request: - query_params["direction"] = request.direction - if compute.ListPeeringRoutesNetworksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListPeeringRoutesNetworksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListPeeringRoutesNetworksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListPeeringRoutesNetworksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListPeeringRoutesNetworksRequest.peering_name in request: - query_params["peeringName"] = request.peering_name - if compute.ListPeeringRoutesNetworksRequest.region in request: - query_params["region"] = request.region - if compute.ListPeeringRoutesNetworksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/networks/{network}/listPeeringRoutes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.ListPeeringRoutesNetworksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListPeeringRoutesNetworksRequest.to_json( + compute.ListPeeringRoutesNetworksRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -517,10 +770,12 @@ def list_peering_routes( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. 
@@ -530,6 +785,9 @@ def patch( The request object. A request message for Networks.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -553,30 +811,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/networks/{network}", + "body": "network_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.PatchNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Network.to_json( - request.network_resource, + compute.Network(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}".format( - host=self._host, project=request.project, network=request.network, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchNetworkRequest.to_json( + compute.PatchNetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchNetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -587,10 +875,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def remove_peering( + def _remove_peering( self, request: compute.RemovePeeringNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove peering method over HTTP. @@ -601,6 +891,9 @@ def remove_peering( Networks.RemovePeering. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -624,30 +917,60 @@ def remove_peering( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networks/{network}/removePeering", + "body": "networks_remove_peering_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.RemovePeeringNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworksRemovePeeringRequest.to_json( - request.networks_remove_peering_request_resource, + compute.NetworksRemovePeeringRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}/removePeering".format( - host=self._host, project=request.project, network=request.network, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemovePeeringNetworkRequest.to_json( + compute.RemovePeeringNetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemovePeeringNetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -658,10 +981,12 @@ def remove_peering( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def switch_to_custom_mode( + def _switch_to_custom_mode( self, request: compute.SwitchToCustomModeNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the switch to custom mode method over HTTP. @@ -672,6 +997,9 @@ def switch_to_custom_mode( Networks.SwitchToCustomMode. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -695,22 +1023,55 @@ def switch_to_custom_mode( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}/switchToCustomMode".format( - host=self._host, project=request.project, network=request.network, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/networks/{network}/switchToCustomMode", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.SwitchToCustomModeNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SwitchToCustomModeNetworkRequest.to_json( + compute.SwitchToCustomModeNetworkRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SwitchToCustomModeNetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -720,10 +1081,12 @@ def switch_to_custom_mode( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update_peering( + def _update_peering( self, request: compute.UpdatePeeringNetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update peering method over HTTP. @@ -734,6 +1097,9 @@ def update_peering( Networks.UpdatePeering. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -757,30 +1123,60 @@ def update_peering( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/networks/{network}/updatePeering", + "body": "networks_update_peering_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network", "network"), + ("project", "project"), + ] + + request_kwargs = compute.UpdatePeeringNetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworksUpdatePeeringRequest.to_json( - request.networks_update_peering_request_resource, + compute.NetworksUpdatePeeringRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/networks/{network}/updatePeering".format( - host=self._host, project=request.project, network=request.network, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdatePeeringNetworkRequest.to_json( + compute.UpdatePeeringNetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdatePeeringNetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -791,5 +1187,69 @@ def update_peering( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def add_peering( + self, + ) -> Callable[[compute.AddPeeringNetworkRequest], compute.Operation]: + return self._add_peering + + @property + def delete(self) -> Callable[[compute.DeleteNetworkRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetNetworkRequest], compute.Network]: + return self._get + + @property + def get_effective_firewalls( + self, + ) -> Callable[ + [compute.GetEffectiveFirewallsNetworkRequest], + compute.NetworksGetEffectiveFirewallsResponse, + ]: + return self._get_effective_firewalls + + @property + def insert(self) -> Callable[[compute.InsertNetworkRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListNetworksRequest], compute.NetworkList]: + return self._list + + @property + def list_peering_routes( + self, + ) -> Callable[ + [compute.ListPeeringRoutesNetworksRequest], compute.ExchangedPeeringRoutesList + ]: + return self._list_peering_routes + + @property + def patch(self) -> Callable[[compute.PatchNetworkRequest], compute.Operation]: + return self._patch + + @property + def remove_peering( + self, + ) -> Callable[[compute.RemovePeeringNetworkRequest], compute.Operation]: + return self._remove_peering + + @property + def switch_to_custom_mode( + self, + ) -> Callable[[compute.SwitchToCustomModeNetworkRequest], compute.Operation]: + return self._switch_to_custom_mode + + @property + def update_peering( + self, + ) -> Callable[[compute.UpdatePeeringNetworkRequest], compute.Operation]: + return self._update_peering + + def close(self): + self._session.close() + __all__ = ("NetworksRestTransport",) diff --git a/google/cloud/compute_v1/services/node_groups/client.py b/google/cloud/compute_v1/services/node_groups/client.py index e1dbe0070..1c6bc7b66 100644 --- a/google/cloud/compute_v1/services/node_groups/client.py +++ b/google/cloud/compute_v1/services/node_groups/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.node_groups import pagers from google.cloud.compute_v1.types import compute from .transports.base import NodeGroupsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,24 +335,25 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_nodes( self, - request: compute.AddNodesNodeGroupRequest = None, + request: Union[compute.AddNodesNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, node_group: str = None, node_groups_add_nodes_request_resource: compute.NodeGroupsAddNodesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Adds specified number of nodes to the node group. Args: - request (google.cloud.compute_v1.types.AddNodesNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.AddNodesNodeGroupRequest, dict]): The request object. A request message for NodeGroups.AddNodes. See the method description for details. @@ -435,10 +447,10 @@ def add_nodes( def aggregated_list( self, - request: compute.AggregatedListNodeGroupsRequest = None, + request: Union[compute.AggregatedListNodeGroupsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -447,7 +459,7 @@ def aggregated_list( group. 
Args: - request (google.cloud.compute_v1.types.AggregatedListNodeGroupsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListNodeGroupsRequest, dict]): The request object. A request message for NodeGroups.AggregatedList. See the method description for details. @@ -508,19 +520,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteNodeGroupRequest = None, + request: Union[compute.DeleteNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, node_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified NodeGroup resource. Args: - request (google.cloud.compute_v1.types.DeleteNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.DeleteNodeGroupRequest, dict]): The request object. A request message for NodeGroups.Delete. See the method description for details. @@ -605,20 +617,20 @@ def delete( def delete_nodes( self, - request: compute.DeleteNodesNodeGroupRequest = None, + request: Union[compute.DeleteNodesNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, node_group: str = None, node_groups_delete_nodes_request_resource: compute.NodeGroupsDeleteNodesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes specified nodes from the node group. Args: - request (google.cloud.compute_v1.types.DeleteNodesNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.DeleteNodesNodeGroupRequest, dict]): The request object. A request message for NodeGroups.DeleteNodes. See the method description for details. @@ -714,12 +726,12 @@ def delete_nodes( def get( self, - request: compute.GetNodeGroupRequest = None, + request: Union[compute.GetNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, node_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeGroup: @@ -729,7 +741,7 @@ def get( nodeGroups.listNodes instead. Args: - request (google.cloud.compute_v1.types.GetNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.GetNodeGroupRequest, dict]): The request object. A request message for NodeGroups.Get. See the method description for details. project (str): @@ -807,12 +819,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyNodeGroupRequest = None, + request: Union[compute.GetIamPolicyNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -820,7 +832,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyNodeGroupRequest, dict]): The request object. A request message for NodeGroups.GetIamPolicy. See the method description for details. 
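The constructor change near the top of this file tightens GOOGLE_API_USE_CLIENT_CERTIFICATE handling: where distutils.util.strtobool also accepted values such as "1", "yes" or "on", only the literal strings "true" and "false" pass now, and anything else fails fast at client construction. A hedged sketch of the resulting behavior:

    import os
    from google.cloud import compute_v1

    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "1"  # previously treated as truthy
    try:
        compute_v1.NodeGroupsClient()
    except ValueError as exc:
        # Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be
        # either `true` or `false`
        print(exc)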
@@ -929,13 +941,13 @@ def get_iam_policy( def insert( self, - request: compute.InsertNodeGroupRequest = None, + request: Union[compute.InsertNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, initial_node_count: int = None, node_group_resource: compute.NodeGroup = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -943,7 +955,7 @@ def insert( using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.InsertNodeGroupRequest, dict]): The request object. A request message for NodeGroups.Insert. See the method description for details. @@ -1037,11 +1049,11 @@ def insert( def list( self, - request: compute.ListNodeGroupsRequest = None, + request: Union[compute.ListNodeGroupsRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -1050,7 +1062,7 @@ def list( more details about each group. Args: - request (google.cloud.compute_v1.types.ListNodeGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListNodeGroupsRequest, dict]): The request object. A request message for NodeGroups.List. See the method description for details. project (str): @@ -1120,19 +1132,19 @@ def list( def list_nodes( self, - request: compute.ListNodesNodeGroupsRequest = None, + request: Union[compute.ListNodesNodeGroupsRequest, dict] = None, *, project: str = None, zone: str = None, node_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListNodesPager: r"""Lists nodes in the node group. Args: - request (google.cloud.compute_v1.types.ListNodesNodeGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListNodesNodeGroupsRequest, dict]): The request object. A request message for NodeGroups.ListNodes. See the method description for details. @@ -1211,20 +1223,20 @@ def list_nodes( def patch( self, - request: compute.PatchNodeGroupRequest = None, + request: Union[compute.PatchNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, node_group: str = None, node_group_resource: compute.NodeGroup = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Updates the specified node group. Args: - request (google.cloud.compute_v1.types.PatchNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.PatchNodeGroupRequest, dict]): The request object. A request message for NodeGroups.Patch. See the method description for details. 
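With the Union[..., dict] request signatures above, callers can pass a plain dict instead of constructing the request type and the generated client coerces it. A usage sketch (project, zone and group names are made up, and application default credentials are assumed):

    from google.cloud import compute_v1

    client = compute_v1.NodeGroupsClient()
    node_group = client.get(
        request={"project": "my-project", "zone": "us-central1-a", "node_group": "my-group"}
    )
    print(node_group.name)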
@@ -1316,13 +1328,13 @@ def patch( def set_iam_policy( self, - request: compute.SetIamPolicyNodeGroupRequest = None, + request: Union[compute.SetIamPolicyNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, zone_set_policy_request_resource: compute.ZoneSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1330,7 +1342,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyNodeGroupRequest, dict]): The request object. A request message for NodeGroups.SetIamPolicy. See the method description for details. @@ -1450,20 +1462,20 @@ def set_iam_policy( def set_node_template( self, - request: compute.SetNodeTemplateNodeGroupRequest = None, + request: Union[compute.SetNodeTemplateNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, node_group: str = None, node_groups_set_node_template_request_resource: compute.NodeGroupsSetNodeTemplateRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Updates the node template of the node group. Args: - request (google.cloud.compute_v1.types.SetNodeTemplateNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.SetNodeTemplateNodeGroupRequest, dict]): The request object. A request message for NodeGroups.SetNodeTemplate. See the method description for details. @@ -1559,13 +1571,13 @@ def set_node_template( def test_iam_permissions( self, - request: compute.TestIamPermissionsNodeGroupRequest = None, + request: Union[compute.TestIamPermissionsNodeGroupRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1573,7 +1585,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsNodeGroupRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNodeGroupRequest, dict]): The request object. A request message for NodeGroups.TestIamPermissions. See the method description for details. @@ -1652,6 +1664,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/node_groups/pagers.py b/google/cloud/compute_v1/services/node_groups/pagers.py index f372a3a7d..c390991a3 100644 --- a/google/cloud/compute_v1/services/node_groups/pagers.py +++ b/google/cloud/compute_v1/services/node_groups/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NodeGroupAggregatedList]: + def pages(self) -> Iterator[compute.NodeGroupAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.NodeGroupsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.NodeGroupsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NodeGroupList]: + def pages(self) -> Iterator[compute.NodeGroupList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NodeGroup]: + def __iter__(self) -> Iterator[compute.NodeGroup]: for page in self.pages: yield from page.items @@ -201,14 +201,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NodeGroupsListNodes]: + def pages(self) -> Iterator[compute.NodeGroupsListNodes]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NodeGroupNode]: + def __iter__(self) -> Iterator[compute.NodeGroupNode]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/node_groups/transports/base.py b/google/cloud/compute_v1/services/node_groups/transports/base.py index eedf04e8b..145c46fa3 100644 --- a/google/cloud/compute_v1/services/node_groups/transports/base.py +++ b/google/cloud/compute_v1/services/node_groups/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class NodeGroupsTransport(abc.ABC): """Abstract transport class for NodeGroups.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -201,6 +165,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def add_nodes( self, diff --git a/google/cloud/compute_v1/services/node_groups/transports/rest.py b/google/cloud/compute_v1/services/node_groups/transports/rest.py index 3e462c744..ab0d28e99 100644 --- a/google/cloud/compute_v1/services/node_groups/transports/rest.py +++ b/google/cloud/compute_v1/services/node_groups/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import NodeGroupsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import NodeGroupsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class NodeGroupsRestTransport(NodeGroupsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_nodes( + def _add_nodes( self, request: compute.AddNodesNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add nodes method over HTTP. 
@@ -112,6 +136,9 @@ def add_nodes( NodeGroups.AddNodes. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,33 +162,61 @@ def add_nodes( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/addNodes", + "body": "node_groups_add_nodes_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_group", "nodeGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.AddNodesNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NodeGroupsAddNodesRequest.to_json( - request.node_groups_add_nodes_request_resource, + compute.NodeGroupsAddNodesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/addNodes".format( - host=self._host, - project=request.project, - zone=request.zone, - node_group=request.node_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddNodesNodeGroupRequest.to_json( + compute.AddNodesNodeGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddNodesNodeGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -172,10 +227,12 @@ def add_nodes( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListNodeGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeGroupAggregatedList: r"""Call the aggregated list method over HTTP. @@ -186,6 +243,9 @@ def aggregated_list( NodeGroups.AggregatedList. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -194,32 +254,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/nodeGroups".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListNodeGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListNodeGroupsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListNodeGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListNodeGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListNodeGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListNodeGroupsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/nodeGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListNodeGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListNodeGroupsRequest.to_json( + compute.AggregatedListNodeGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -231,10 +313,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -245,6 +329,9 @@ def delete( NodeGroups.Delete. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -268,25 +355,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}".format( - host=self._host, - project=request.project, - zone=request.zone, - node_group=request.node_group, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_group", "nodeGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteNodeGroupRequest.to_json( + compute.DeleteNodeGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteNodeGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -296,10 +412,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_nodes( + def _delete_nodes( self, request: compute.DeleteNodesNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete nodes method over HTTP. @@ -310,6 +428,9 @@ def delete_nodes( NodeGroups.DeleteNodes. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
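The add_nodes, aggregated_list, and delete hunks above all switch from hand-formatted URLs to google.api_core.path_template.transcode, which matches the request fields against the http_options entries and splits them into URI, body, and query parameters. A rough sketch of the behavior these hunks rely on (field values are made up for illustration):

    from google.api_core import path_template

    http_options = [
        {
            "method": "post",
            "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/addNodes",
            "body": "node_groups_add_nodes_request_resource",
        },
    ]

    transcoded = path_template.transcode(
        http_options,
        project="my-project",          # illustrative values only
        zone="us-central1-a",
        node_group="my-group",
        request_id="abc123",
        node_groups_add_nodes_request_resource={"additional_node_count": 2},
    )

    # Fields named in the URI template are substituted into "uri", the field
    # named by "body" becomes the request body, and the remainder lands in
    # "query_params" -- roughly:
    #   transcoded["method"]       -> "post"
    #   transcoded["uri"]          -> "/compute/v1/projects/my-project/zones/us-central1-a/nodeGroups/my-group/addNodes"
    #   transcoded["body"]         -> {"additional_node_count": 2}
    #   transcoded["query_params"] -> {"request_id": "abc123"}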
@@ -333,33 +454,61 @@ def delete_nodes( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/deleteNodes", + "body": "node_groups_delete_nodes_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_group", "nodeGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteNodesNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NodeGroupsDeleteNodesRequest.to_json( - request.node_groups_delete_nodes_request_resource, + compute.NodeGroupsDeleteNodesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/deleteNodes".format( - host=self._host, - project=request.project, - zone=request.zone, - node_group=request.node_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteNodesNodeGroupRequest.to_json( + compute.DeleteNodesNodeGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteNodesNodeGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -370,10 +519,12 @@ def delete_nodes( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeGroup: r"""Call the get method over HTTP. @@ -383,6 +534,9 @@ def get( The request object. A request message for NodeGroups.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -402,23 +556,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}".format( - host=self._host, - project=request.project, - zone=request.zone, - node_group=request.node_group, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_group", "nodeGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetNodeGroupRequest.to_json( + compute.GetNodeGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -428,10 +613,12 @@ def get( # Return the response return compute.NodeGroup.from_json(response.content, ignore_unknown_fields=True) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -442,6 +629,9 @@ def get_iam_policy( NodeGroups.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
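The body-less methods such as the get hunk above (and get_iam_policy, list further on) run the JSON-ified query parameters through rest_helpers.flatten_query_params before handing them to requests. As I read it, the helper turns a possibly nested dict into a flat sequence of key/value pairs suitable as URL parameters; a hedged sketch of that intent, with made-up input:

    from google.api_core import rest_helpers

    # Illustrative input; in the transport the dict comes from
    # <Request>.to_json(...) applied to transcoded_request["query_params"].
    query_params = {"requestId": "abc123", "maxResults": 100}
    params = rest_helpers.flatten_query_params(query_params)
    # Roughly [("requestId", "abc123"), ("maxResults", 100)]; nested message
    # fields are expected to come out as dotted names.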
@@ -490,30 +680,56 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyNodeGroupRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetIamPolicyNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyNodeGroupRequest.to_json( + compute.GetIamPolicyNodeGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -523,10 +739,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -537,6 +755,9 @@ def insert( NodeGroups.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -560,31 +781,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups", + "body": "node_group_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("initial_node_count", "initialNodeCount"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NodeGroup.to_json( - request.node_group_resource, + compute.NodeGroup(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertNodeGroupRequest.to_json( + compute.InsertNodeGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - query_params["initialNodeCount"] = request.initial_node_count - if compute.InsertNodeGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -595,10 +846,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListNodeGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeGroupList: r"""Call the list method over HTTP. @@ -609,6 +862,9 @@ def list( NodeGroups.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -617,30 +873,53 @@ def list( Contains a list of nodeGroups. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups".format( - host=self._host, project=request.project, zone=request.zone, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListNodeGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListNodeGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListNodeGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListNodeGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListNodeGroupsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListNodeGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNodeGroupsRequest.to_json( + compute.ListNodeGroupsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -652,10 +931,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_nodes( + def _list_nodes( self, request: compute.ListNodesNodeGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeGroupsListNodes: r"""Call the list nodes method over HTTP. @@ -666,6 +947,9 @@ def list_nodes( NodeGroups.ListNodes. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -674,33 +958,54 @@ def list_nodes( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/listNodes".format( - host=self._host, - project=request.project, - zone=request.zone, - node_group=request.node_group, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListNodesNodeGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListNodesNodeGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListNodesNodeGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListNodesNodeGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListNodesNodeGroupsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/listNodes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_group", "nodeGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListNodesNodeGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNodesNodeGroupsRequest.to_json( + compute.ListNodesNodeGroupsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -712,10 +1017,12 @@ def list_nodes( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -726,6 +1033,9 @@ def patch( NodeGroups.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -749,33 +1059,61 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}", + "body": "node_group_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_group", "nodeGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.PatchNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NodeGroup.to_json( - request.node_group_resource, + compute.NodeGroup(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}".format( - host=self._host, - project=request.project, - zone=request.zone, - node_group=request.node_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchNodeGroupRequest.to_json( + compute.PatchNodeGroupRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchNodeGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -786,10 +1124,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -800,6 +1140,9 @@ def set_iam_policy( NodeGroups.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
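A smaller related change: instead of hard-coding self._session.post / .get / .patch per method, each call now dispatches on the verb coming out of transcode, so the patch hunk above resolves getattr(self._session, "patch") at runtime. In isolation the pattern is just:

    import requests

    session = requests.Session()
    method = "patch"                 # what transcoded_request["method"] holds
    send = getattr(session, method)  # resolves to session.patch
    # send(url, timeout=..., headers=..., params=..., data=...) then behaves
    # like the previous hard-coded session.patch(...) call, but the verb is
    # driven by http_options rather than by the generated method name.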
@@ -848,31 +1191,63 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/setIamPolicy", + "body": "zone_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetIamPolicyNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ZoneSetPolicyRequest.to_json( - request.zone_set_policy_request_resource, + compute.ZoneSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyNodeGroupRequest.to_json( + compute.SetIamPolicyNodeGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -883,10 +1258,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def set_node_template( + def _set_node_template( self, request: compute.SetNodeTemplateNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set node template method over HTTP. @@ -897,6 +1274,9 @@ def set_node_template( NodeGroups.SetNodeTemplate. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
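Each method still ends with the unchanged context lines about raising the appropriate core_exceptions.GoogleAPICallError subclass. That code is not shown in this diff; the usual shape of such a check, using google.api_core's mapping from HTTP responses to typed exceptions, is sketched below (an assumption about the surrounding code, not a quote of it):

    from google.api_core import exceptions as core_exceptions

    def _raise_for_error(response):
        # Map any non-2xx requests.Response to a typed exception,
        # e.g. 404 -> NotFound, 403 -> PermissionDenied.
        if response.status_code >= 400:
            raise core_exceptions.from_http_response(response)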
@@ -920,33 +1300,63 @@ def set_node_template( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/setNodeTemplate", + "body": "node_groups_set_node_template_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_group", "nodeGroup"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetNodeTemplateNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NodeGroupsSetNodeTemplateRequest.to_json( - request.node_groups_set_node_template_request_resource, + compute.NodeGroupsSetNodeTemplateRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/setNodeTemplate".format( - host=self._host, - project=request.project, - zone=request.zone, - node_group=request.node_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetNodeTemplateNodeGroupRequest.to_json( + compute.SetNodeTemplateNodeGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetNodeTemplateNodeGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -957,10 +1367,12 @@ def set_node_template( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsNodeGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -971,6 +1383,9 @@ def test_iam_permissions( NodeGroups.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -979,31 +1394,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.TestIamPermissionsNodeGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsNodeGroupRequest.to_json( + compute.TestIamPermissionsNodeGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1016,5 +1463,80 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def add_nodes( + self, + ) -> Callable[[compute.AddNodesNodeGroupRequest], compute.Operation]: + return self._add_nodes + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListNodeGroupsRequest], compute.NodeGroupAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteNodeGroupRequest], compute.Operation]: + return self._delete + + @property + def delete_nodes( + self, + ) -> Callable[[compute.DeleteNodesNodeGroupRequest], compute.Operation]: + return self._delete_nodes + + @property + def get(self) -> Callable[[compute.GetNodeGroupRequest], compute.NodeGroup]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyNodeGroupRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert(self) -> Callable[[compute.InsertNodeGroupRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListNodeGroupsRequest], compute.NodeGroupList]: + return self._list + + @property + def list_nodes( + self, + ) -> Callable[[compute.ListNodesNodeGroupsRequest], compute.NodeGroupsListNodes]: + return self._list_nodes + + @property + def patch(self) -> Callable[[compute.PatchNodeGroupRequest], compute.Operation]: + return self._patch + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyNodeGroupRequest], compute.Policy]: + return self._set_iam_policy + + @property + def set_node_template( + self, + ) -> Callable[[compute.SetNodeTemplateNodeGroupRequest], compute.Operation]: + return self._set_node_template + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsNodeGroupRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("NodeGroupsRestTransport",) diff --git a/google/cloud/compute_v1/services/node_templates/client.py b/google/cloud/compute_v1/services/node_templates/client.py index 2b9adb54d..b4ad2397d 100644 --- a/google/cloud/compute_v1/services/node_templates/client.py +++ b/google/cloud/compute_v1/services/node_templates/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.node_templates import pagers from google.cloud.compute_v1.types import compute from .transports.base import NodeTemplatesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListNodeTemplatesRequest = None, + request: Union[compute.AggregatedListNodeTemplatesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of node templates. Args: - request (google.cloud.compute_v1.types.AggregatedListNodeTemplatesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListNodeTemplatesRequest, dict]): The request object. A request message for NodeTemplates.AggregatedList. See the method description for details. @@ -399,19 +411,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteNodeTemplateRequest = None, + request: Union[compute.DeleteNodeTemplateRequest, dict] = None, *, project: str = None, region: str = None, node_template: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified NodeTemplate resource. 
Args: - request (google.cloud.compute_v1.types.DeleteNodeTemplateRequest): + request (Union[google.cloud.compute_v1.types.DeleteNodeTemplateRequest, dict]): The request object. A request message for NodeTemplates.Delete. See the method description for details. @@ -496,12 +508,12 @@ def delete( def get( self, - request: compute.GetNodeTemplateRequest = None, + request: Union[compute.GetNodeTemplateRequest, dict] = None, *, project: str = None, region: str = None, node_template: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeTemplate: @@ -509,7 +521,7 @@ def get( available node templates by making a list() request. Args: - request (google.cloud.compute_v1.types.GetNodeTemplateRequest): + request (Union[google.cloud.compute_v1.types.GetNodeTemplateRequest, dict]): The request object. A request message for NodeTemplates.Get. See the method description for details. @@ -582,12 +594,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyNodeTemplateRequest = None, + request: Union[compute.GetIamPolicyNodeTemplateRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -595,7 +607,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyNodeTemplateRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyNodeTemplateRequest, dict]): The request object. A request message for NodeTemplates.GetIamPolicy. See the method description for details. @@ -704,12 +716,12 @@ def get_iam_policy( def insert( self, - request: compute.InsertNodeTemplateRequest = None, + request: Union[compute.InsertNodeTemplateRequest, dict] = None, *, project: str = None, region: str = None, node_template_resource: compute.NodeTemplate = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -717,7 +729,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertNodeTemplateRequest): + request (Union[google.cloud.compute_v1.types.InsertNodeTemplateRequest, dict]): The request object. A request message for NodeTemplates.Insert. See the method description for details. @@ -800,11 +812,11 @@ def insert( def list( self, - request: compute.ListNodeTemplatesRequest = None, + request: Union[compute.ListNodeTemplatesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -812,7 +824,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListNodeTemplatesRequest): + request (Union[google.cloud.compute_v1.types.ListNodeTemplatesRequest, dict]): The request object. A request message for NodeTemplates.List. See the method description for details. 
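Further down in this file the client gains __enter__/__exit__, so it can be used as a context manager that closes the underlying transport on exit. A usage sketch (project and region values are made up; as the new docstring warns, only do this when the transport is not shared with another client):

    from google.cloud import compute_v1

    with compute_v1.NodeTemplatesClient() as client:
        for template in client.list(project="my-project", region="us-central1"):
            print(template.name)
    # Leaving the block calls client.transport.close() via __exit__.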
@@ -883,13 +895,13 @@ def list( def set_iam_policy( self, - request: compute.SetIamPolicyNodeTemplateRequest = None, + request: Union[compute.SetIamPolicyNodeTemplateRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_policy_request_resource: compute.RegionSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -897,7 +909,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyNodeTemplateRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyNodeTemplateRequest, dict]): The request object. A request message for NodeTemplates.SetIamPolicy. See the method description for details. @@ -1017,13 +1029,13 @@ def set_iam_policy( def test_iam_permissions( self, - request: compute.TestIamPermissionsNodeTemplateRequest = None, + request: Union[compute.TestIamPermissionsNodeTemplateRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1031,7 +1043,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsNodeTemplateRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsNodeTemplateRequest, dict]): The request object. A request message for NodeTemplates.TestIamPermissions. See the method description for details. @@ -1110,6 +1122,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/node_templates/pagers.py b/google/cloud/compute_v1/services/node_templates/pagers.py index 2e9f52ed7..3997cf83f 100644 --- a/google/cloud/compute_v1/services/node_templates/pagers.py +++ b/google/cloud/compute_v1/services/node_templates/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NodeTemplateAggregatedList]: + def pages(self) -> Iterator[compute.NodeTemplateAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.NodeTemplatesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.NodeTemplatesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NodeTemplateList]: + def pages(self) -> Iterator[compute.NodeTemplateList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NodeTemplate]: + def __iter__(self) -> Iterator[compute.NodeTemplate]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/node_templates/transports/base.py b/google/cloud/compute_v1/services/node_templates/transports/base.py index b53218c1d..59564f314 100644 --- a/google/cloud/compute_v1/services/node_templates/transports/base.py +++ b/google/cloud/compute_v1/services/node_templates/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: 
NO COVER - _GOOGLE_AUTH_VERSION = None - class NodeTemplatesTransport(abc.ABC): """Abstract transport class for NodeTemplates.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -186,6 +150,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/node_templates/transports/rest.py b/google/cloud/compute_v1/services/node_templates/transports/rest.py index 0c895df6b..a08a6ce4f 100644 --- a/google/cloud/compute_v1/services/node_templates/transports/rest.py +++ b/google/cloud/compute_v1/services/node_templates/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + NodeTemplatesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import NodeTemplatesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class NodeTemplatesRestTransport(NodeTemplatesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListNodeTemplatesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeTemplateAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( NodeTemplates.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -120,32 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/nodeTemplates".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/nodeTemplates", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListNodeTemplatesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListNodeTemplatesRequest.to_json( + compute.AggregatedListNodeTemplatesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListNodeTemplatesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListNodeTemplatesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListNodeTemplatesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListNodeTemplatesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListNodeTemplatesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListNodeTemplatesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteNodeTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -171,6 +225,9 @@ def delete( NodeTemplates.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -194,25 +251,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}".format( - host=self._host, - project=request.project, - region=request.region, - node_template=request.node_template, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_template", "nodeTemplate"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteNodeTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteNodeTemplateRequest.to_json( + compute.DeleteNodeTemplateRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteNodeTemplateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +308,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetNodeTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeTemplate: r"""Call the get method over HTTP. @@ -236,6 +324,9 @@ def get( NodeTemplates.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -249,23 +340,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}".format( - host=self._host, - project=request.project, - region=request.region, - node_template=request.node_template, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_template", "nodeTemplate"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetNodeTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetNodeTemplateRequest.to_json( + compute.GetNodeTemplateRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -277,10 +399,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyNodeTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -291,6 +415,9 @@ def get_iam_policy( NodeTemplates.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -339,30 +466,56 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyNodeTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyNodeTemplateRequest.to_json( + compute.GetIamPolicyNodeTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyNodeTemplateRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -372,10 +525,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertNodeTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -386,6 +541,9 @@ def insert( NodeTemplates.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -409,30 +567,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/nodeTemplates", + "body": "node_template_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertNodeTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NodeTemplate.to_json( - request.node_template_resource, + compute.NodeTemplate(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/nodeTemplates".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertNodeTemplateRequest.to_json( + compute.InsertNodeTemplateRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertNodeTemplateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -443,10 +631,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListNodeTemplatesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeTemplateList: r"""Call the list method over HTTP. @@ -457,6 +647,9 @@ def list( NodeTemplates.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -465,30 +658,53 @@ def list( Contains a list of node templates. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/nodeTemplates".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/nodeTemplates", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListNodeTemplatesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNodeTemplatesRequest.to_json( + compute.ListNodeTemplatesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListNodeTemplatesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListNodeTemplatesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListNodeTemplatesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListNodeTemplatesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListNodeTemplatesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -500,10 +716,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyNodeTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -514,6 +732,9 @@ def set_iam_policy( NodeTemplates.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -562,31 +783,63 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyNodeTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetPolicyRequest.to_json( - request.region_set_policy_request_resource, + compute.RegionSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyNodeTemplateRequest.to_json( + compute.SetIamPolicyNodeTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -597,10 +850,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsNodeTemplateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -611,6 +866,9 @@ def test_iam_permissions( NodeTemplates.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -619,31 +877,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsNodeTemplateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsNodeTemplateRequest.to_json( + compute.TestIamPermissionsNodeTemplateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -656,5 +946,58 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListNodeTemplatesRequest], compute.NodeTemplateAggregatedList + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteNodeTemplateRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetNodeTemplateRequest], compute.NodeTemplate]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyNodeTemplateRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert( + self, + ) -> Callable[[compute.InsertNodeTemplateRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListNodeTemplatesRequest], compute.NodeTemplateList]: + return self._list + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyNodeTemplateRequest], compute.Policy]: + return self._set_iam_policy + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsNodeTemplateRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("NodeTemplatesRestTransport",) diff --git a/google/cloud/compute_v1/services/node_types/client.py b/google/cloud/compute_v1/services/node_types/client.py index 7db2005a4..749d6b7b7 100644 --- a/google/cloud/compute_v1/services/node_types/client.py +++ b/google/cloud/compute_v1/services/node_types/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
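# A minimal sketch of the transcoding flow the rewritten REST methods above now
# share: path_template.transcode() picks the matching HTTP rule, substitutes the
# path variables, and returns the leftover fields as query parameters; the
# generated code then backfills any required fields dropped by to_json. Values
# here are placeholders.
from google.api_core import path_template

http_options = [
    {"method": "get", "uri": "/compute/v1/projects/{project}/aggregated/nodeTemplates"},
]
request_kwargs = {"project": "my-project", "max_results": 10}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       -> "get"
# transcoded["uri"]          -> "/compute/v1/projects/my-project/aggregated/nodeTemplates"
# transcoded["query_params"] -> {"max_results": 10}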
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.node_types import pagers from google.cloud.compute_v1.types import compute from .transports.base import NodeTypesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListNodeTypesRequest = None, + request: Union[compute.AggregatedListNodeTypesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of node types. Args: - request (google.cloud.compute_v1.types.AggregatedListNodeTypesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListNodeTypesRequest, dict]): The request object. A request message for NodeTypes.AggregatedList. See the method description for details. @@ -399,12 +411,12 @@ def aggregated_list( def get( self, - request: compute.GetNodeTypeRequest = None, + request: Union[compute.GetNodeTypeRequest, dict] = None, *, project: str = None, zone: str = None, node_type: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeType: @@ -412,7 +424,7 @@ def get( available node types by making a list() request. Args: - request (google.cloud.compute_v1.types.GetNodeTypeRequest): + request (Union[google.cloud.compute_v1.types.GetNodeTypeRequest, dict]): The request object. 
A request message for NodeTypes.Get. See the method description for details. project (str): @@ -489,11 +501,11 @@ def get( def list( self, - request: compute.ListNodeTypesRequest = None, + request: Union[compute.ListNodeTypesRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -501,7 +513,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListNodeTypesRequest): + request (Union[google.cloud.compute_v1.types.ListNodeTypesRequest, dict]): The request object. A request message for NodeTypes.List. See the method description for details. project (str): @@ -569,6 +581,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/node_types/pagers.py b/google/cloud/compute_v1/services/node_types/pagers.py index 37318acc4..b4879a59e 100644 --- a/google/cloud/compute_v1/services/node_types/pagers.py +++ b/google/cloud/compute_v1/services/node_types/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NodeTypeAggregatedList]: + def pages(self) -> Iterator[compute.NodeTypeAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.NodeTypesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.NodeTypesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NodeTypeList]: + def pages(self) -> Iterator[compute.NodeTypeList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NodeType]: + def __iter__(self) -> Iterator[compute.NodeType]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/node_types/transports/base.py b/google/cloud/compute_v1/services/node_types/transports/base.py index f82cea712..8d5a845af 100644 --- a/google/cloud/compute_v1/services/node_types/transports/base.py +++ b/google/cloud/compute_v1/services/node_types/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import 
google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class NodeTypesTransport(abc.ABC): """Abstract transport class for NodeTypes.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -170,6 +134,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/node_types/transports/rest.py b/google/cloud/compute_v1/services/node_types/transports/rest.py index 2dd223e77..da2789cf3 100644 --- a/google/cloud/compute_v1/services/node_types/transports/rest.py +++ b/google/cloud/compute_v1/services/node_types/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import NodeTypesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import NodeTypesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class NodeTypesRestTransport(NodeTypesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListNodeTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeTypeAggregatedList: r"""Call the aggregated list method over HTTP. 
@@ -112,6 +136,9 @@ def aggregated_list( NodeTypes.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/nodeTypes".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/nodeTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListNodeTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListNodeTypesRequest.to_json( + compute.AggregatedListNodeTypesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListNodeTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListNodeTypesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListNodeTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListNodeTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListNodeTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListNodeTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetNodeTypeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeType: r"""Call the get method over HTTP. 
@@ -170,6 +221,9 @@ def get( The request object. A request message for NodeTypes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -188,23 +242,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeTypes/{node_type}".format( - host=self._host, - project=request.project, - zone=request.zone, - node_type=request.node_type, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeTypes/{node_type}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("node_type", "nodeType"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetNodeTypeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetNodeTypeRequest.to_json( + compute.GetNodeTypeRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -214,10 +299,12 @@ def get( # Return the response return compute.NodeType.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListNodeTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NodeTypeList: r"""Call the list method over HTTP. @@ -227,6 +314,9 @@ def list( The request object. A request message for NodeTypes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -235,30 +325,53 @@ def list( Contains a list of node types. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/nodeTypes".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/nodeTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListNodeTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListNodeTypesRequest.to_json( + compute.ListNodeTypesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListNodeTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListNodeTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListNodeTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListNodeTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListNodeTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -270,5 +383,24 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListNodeTypesRequest], compute.NodeTypeAggregatedList + ]: + return self._aggregated_list + + @property + def get(self) -> Callable[[compute.GetNodeTypeRequest], compute.NodeType]: + return self._get + + @property + def list(self) -> Callable[[compute.ListNodeTypesRequest], compute.NodeTypeList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("NodeTypesRestTransport",) diff --git a/google/cloud/compute_v1/services/packet_mirrorings/client.py b/google/cloud/compute_v1/services/packet_mirrorings/client.py index b06aa5440..c28e5d436 100644 --- a/google/cloud/compute_v1/services/packet_mirrorings/client.py +++ b/google/cloud/compute_v1/services/packet_mirrorings/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.packet_mirrorings import pagers from google.cloud.compute_v1.types import compute from .transports.base import PacketMirroringsTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,21 +337,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListPacketMirroringsRequest = None, + request: Union[compute.AggregatedListPacketMirroringsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of packetMirrorings. Args: - request (google.cloud.compute_v1.types.AggregatedListPacketMirroringsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListPacketMirroringsRequest, dict]): The request object. A request message for PacketMirrorings.AggregatedList. See the method description for details. @@ -402,19 +414,19 @@ def aggregated_list( def delete( self, - request: compute.DeletePacketMirroringRequest = None, + request: Union[compute.DeletePacketMirroringRequest, dict] = None, *, project: str = None, region: str = None, packet_mirroring: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified PacketMirroring resource. 
Args: - request (google.cloud.compute_v1.types.DeletePacketMirroringRequest): + request (Union[google.cloud.compute_v1.types.DeletePacketMirroringRequest, dict]): The request object. A request message for PacketMirrorings.Delete. See the method description for details. @@ -497,19 +509,19 @@ def delete( def get( self, - request: compute.GetPacketMirroringRequest = None, + request: Union[compute.GetPacketMirroringRequest, dict] = None, *, project: str = None, region: str = None, packet_mirroring: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PacketMirroring: r"""Returns the specified PacketMirroring resource. Args: - request (google.cloud.compute_v1.types.GetPacketMirroringRequest): + request (Union[google.cloud.compute_v1.types.GetPacketMirroringRequest, dict]): The request object. A request message for PacketMirrorings.Get. See the method description for details. @@ -587,12 +599,12 @@ def get( def insert( self, - request: compute.InsertPacketMirroringRequest = None, + request: Union[compute.InsertPacketMirroringRequest, dict] = None, *, project: str = None, region: str = None, packet_mirroring_resource: compute.PacketMirroring = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -601,7 +613,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertPacketMirroringRequest): + request (Union[google.cloud.compute_v1.types.InsertPacketMirroringRequest, dict]): The request object. A request message for PacketMirrorings.Insert. See the method description for details. @@ -682,11 +694,11 @@ def insert( def list( self, - request: compute.ListPacketMirroringsRequest = None, + request: Union[compute.ListPacketMirroringsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -694,7 +706,7 @@ def list( available to the specified project and region. Args: - request (google.cloud.compute_v1.types.ListPacketMirroringsRequest): + request (Union[google.cloud.compute_v1.types.ListPacketMirroringsRequest, dict]): The request object. A request message for PacketMirrorings.List. See the method description for details. @@ -764,13 +776,13 @@ def list( def patch( self, - request: compute.PatchPacketMirroringRequest = None, + request: Union[compute.PatchPacketMirroringRequest, dict] = None, *, project: str = None, region: str = None, packet_mirroring: str = None, packet_mirroring_resource: compute.PacketMirroring = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -780,7 +792,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchPacketMirroringRequest): + request (Union[google.cloud.compute_v1.types.PatchPacketMirroringRequest, dict]): The request object. A request message for PacketMirrorings.Patch. See the method description for details. 
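With the request parameters above widened to Union[<Request>, dict], callers can hand the client a plain mapping and let the request message be built from it. A hedged usage sketch; the project, region, and mirroring names are invented:

from google.cloud import compute_v1

client = compute_v1.PacketMirroringsClient()

# The typed request object still works...
request = compute_v1.GetPacketMirroringRequest(
    project="my-project", region="us-central1", packet_mirroring="my-mirroring"
)
mirroring = client.get(request=request)

# ...and so does an equivalent dict.
mirroring = client.get(
    request={
        "project": "my-project",
        "region": "us-central1",
        "packet_mirroring": "my-mirroring",
    }
)
print(mirroring.name)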
@@ -872,13 +884,13 @@ def patch( def test_iam_permissions( self, - request: compute.TestIamPermissionsPacketMirroringRequest = None, + request: Union[compute.TestIamPermissionsPacketMirroringRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -886,7 +898,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsPacketMirroringRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsPacketMirroringRequest, dict]): The request object. A request message for PacketMirrorings.TestIamPermissions. See the method description for details. @@ -965,6 +977,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/packet_mirrorings/pagers.py b/google/cloud/compute_v1/services/packet_mirrorings/pagers.py index ace55727c..d4e4578e3 100644 --- a/google/cloud/compute_v1/services/packet_mirrorings/pagers.py +++ b/google/cloud/compute_v1/services/packet_mirrorings/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.PacketMirroringAggregatedList]: + def pages(self) -> Iterator[compute.PacketMirroringAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.PacketMirroringsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.PacketMirroringsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.PacketMirroringList]: + def pages(self) -> Iterator[compute.PacketMirroringList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.PacketMirroring]: + def __iter__(self) -> Iterator[compute.PacketMirroring]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/packet_mirrorings/transports/base.py b/google/cloud/compute_v1/services/packet_mirrorings/transports/base.py index c4534cede..4d58091a5 100644 --- a/google/cloud/compute_v1/services/packet_mirrorings/transports/base.py +++ 
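The __enter__/__exit__ pair added to the client above and the Iterator-typed pagers can be exercised together. A usage sketch with the same invented names; per the docstring's warning, exiting the block closes the transport, so a transport shared with other clients must not be managed this way:

from google.cloud import compute_v1

with compute_v1.PacketMirroringsClient() as client:
    pager = client.list(request={"project": "my-project", "region": "us-central1"})
    for mirroring in pager:  # __iter__ yields compute.PacketMirroring messages
        print(mirroring.name)
    # pager.pages walks page-by-page instead, yielding compute.PacketMirroringList.
# The transport is closed here via client.transport.close().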
b/google/cloud/compute_v1/services/packet_mirrorings/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class PacketMirroringsTransport(abc.ABC): """Abstract transport class for PacketMirrorings.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -183,6 +147,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
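The base transport above drops the google-auth version probe; google-auth >= 1.25.0 is now assumed, so default scopes are always forwarded. A sketch of the resulting credentials call (the scope URLs shown are the usual Compute scopes and are an assumption, not taken from this diff):

import google.auth

# Assumed AUTH_SCOPES for the Compute transports; treat as illustrative.
AUTH_SCOPES = (
    "https://www.googleapis.com/auth/compute",
    "https://www.googleapis.com/auth/cloud-platform",
)

scopes_kwargs = {"scopes": None, "default_scopes": AUTH_SCOPES}
credentials, project_id = google.auth.default(**scopes_kwargs, quota_project_id=None)
print(project_id)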
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py b/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py index 7aac687b4..1712cf772 100644 --- a/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py +++ b/google/cloud/compute_v1/services/packet_mirrorings/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + PacketMirroringsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import PacketMirroringsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class PacketMirroringsRestTransport(PacketMirroringsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListPacketMirroringsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PacketMirroringAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( PacketMirrorings.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,35 +150,54 @@ def aggregated_list( Contains a list of packetMirrorings. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/packetMirrorings".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/packetMirrorings", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListPacketMirroringsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListPacketMirroringsRequest.to_json( + compute.AggregatedListPacketMirroringsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListPacketMirroringsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListPacketMirroringsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListPacketMirroringsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListPacketMirroringsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListPacketMirroringsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListPacketMirroringsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeletePacketMirroringRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( PacketMirrorings.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,25 +251,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}".format( - host=self._host, - project=request.project, - region=request.region, - packet_mirroring=request.packet_mirroring, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("packet_mirroring", "packetMirroring"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeletePacketMirroringRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeletePacketMirroringRequest.to_json( + compute.DeletePacketMirroringRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeletePacketMirroringRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +310,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetPacketMirroringRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PacketMirroring: r"""Call the get method over HTTP. @@ -239,6 +326,9 @@ def get( PacketMirrorings.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -257,23 +347,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}".format( - host=self._host, - project=request.project, - region=request.region, - packet_mirroring=request.packet_mirroring, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("packet_mirroring", "packetMirroring"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetPacketMirroringRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetPacketMirroringRequest.to_json( + compute.GetPacketMirroringRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -285,10 +406,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertPacketMirroringRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -299,6 +422,9 @@ def insert( PacketMirrorings.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -322,30 +448,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/packetMirrorings", + "body": "packet_mirroring_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertPacketMirroringRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PacketMirroring.to_json( - request.packet_mirroring_resource, + compute.PacketMirroring(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/packetMirrorings".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertPacketMirroringRequest.to_json( + compute.InsertPacketMirroringRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertPacketMirroringRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -356,10 +514,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListPacketMirroringsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PacketMirroringList: r"""Call the list method over HTTP. @@ -370,6 +530,9 @@ def list( PacketMirrorings.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -380,30 +543,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/packetMirrorings".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/packetMirrorings", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListPacketMirroringsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListPacketMirroringsRequest.to_json( + compute.ListPacketMirroringsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListPacketMirroringsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListPacketMirroringsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListPacketMirroringsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListPacketMirroringsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListPacketMirroringsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -415,10 +601,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchPacketMirroringRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -429,6 +617,9 @@ def patch( PacketMirrorings.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -452,33 +643,61 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}", + "body": "packet_mirroring_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("packet_mirroring", "packetMirroring"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchPacketMirroringRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PacketMirroring.to_json( - request.packet_mirroring_resource, + compute.PacketMirroring(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}".format( - host=self._host, - project=request.project, - region=request.region, - packet_mirroring=request.packet_mirroring, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchPacketMirroringRequest.to_json( + compute.PatchPacketMirroringRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchPacketMirroringRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -489,10 +708,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsPacketMirroringRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -503,6 +724,9 @@ def test_iam_permissions( PacketMirrorings.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -511,31 +735,65 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsPacketMirroringRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsPacketMirroringRequest.to_json( + compute.TestIamPermissionsPacketMirroringRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -548,5 +806,56 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListPacketMirroringsRequest], + compute.PacketMirroringAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeletePacketMirroringRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetPacketMirroringRequest], compute.PacketMirroring]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertPacketMirroringRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListPacketMirroringsRequest], compute.PacketMirroringList]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchPacketMirroringRequest], compute.Operation]: + return self._patch + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsPacketMirroringRequest], + compute.TestPermissionsResponse, + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("PacketMirroringsRestTransport",) diff --git a/google/cloud/compute_v1/services/projects/client.py b/google/cloud/compute_v1/services/projects/client.py index 71dd48e7a..01ab7ad39 100644 --- a/google/cloud/compute_v1/services/projects/client.py +++ b/google/cloud/compute_v1/services/projects/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
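Each public method on the REST transport above is now a read-only property returning the matching private implementation (_aggregated_list, _get, ...), so the attribute surface used by the client stays the same while the implementations gain retry/timeout parameters. A direct-transport sketch, using anonymous credentials purely for illustration; an actual call would issue an HTTP request and needs valid credentials:

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.packet_mirrorings.transports.rest import (
    PacketMirroringsRestTransport,
)

transport = PacketMirroringsRestTransport(
    credentials=ga_credentials.AnonymousCredentials()
)
# Public names resolve, via properties, to the private implementations:
print(transport.aggregated_list.__name__)  # "_aggregated_list"
transport.close()  # releases the underlying requests session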
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.projects import pagers from google.cloud.compute_v1.types import compute from .transports.base import ProjectsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def disable_xpn_host( self, - request: compute.DisableXpnHostProjectRequest = None, + request: Union[compute.DisableXpnHostProjectRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Disable this project as a shared VPC host project. Args: - request (google.cloud.compute_v1.types.DisableXpnHostProjectRequest): + request (Union[google.cloud.compute_v1.types.DisableXpnHostProjectRequest, dict]): The request object. A request message for Projects.DisableXpnHost. See the method description for details. @@ -405,11 +417,11 @@ def disable_xpn_host( def disable_xpn_resource( self, - request: compute.DisableXpnResourceProjectRequest = None, + request: Union[compute.DisableXpnResourceProjectRequest, dict] = None, *, project: str = None, projects_disable_xpn_resource_request_resource: compute.ProjectsDisableXpnResourceRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -417,7 +429,7 @@ def disable_xpn_resource( project) associated with this host project. 
Args: - request (google.cloud.compute_v1.types.DisableXpnResourceProjectRequest): + request (Union[google.cloud.compute_v1.types.DisableXpnResourceProjectRequest, dict]): The request object. A request message for Projects.DisableXpnResource. See the method description for details. @@ -495,17 +507,17 @@ def disable_xpn_resource( def enable_xpn_host( self, - request: compute.EnableXpnHostProjectRequest = None, + request: Union[compute.EnableXpnHostProjectRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Enable this project as a shared VPC host project. Args: - request (google.cloud.compute_v1.types.EnableXpnHostProjectRequest): + request (Union[google.cloud.compute_v1.types.EnableXpnHostProjectRequest, dict]): The request object. A request message for Projects.EnableXpnHost. See the method description for details. @@ -572,11 +584,11 @@ def enable_xpn_host( def enable_xpn_resource( self, - request: compute.EnableXpnResourceProjectRequest = None, + request: Union[compute.EnableXpnResourceProjectRequest, dict] = None, *, project: str = None, projects_enable_xpn_resource_request_resource: compute.ProjectsEnableXpnResourceRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -585,7 +597,7 @@ def enable_xpn_resource( used by instances in the service project. Args: - request (google.cloud.compute_v1.types.EnableXpnResourceProjectRequest): + request (Union[google.cloud.compute_v1.types.EnableXpnResourceProjectRequest, dict]): The request object. A request message for Projects.EnableXpnResource. See the method description for details. @@ -663,17 +675,17 @@ def enable_xpn_resource( def get( self, - request: compute.GetProjectRequest = None, + request: Union[compute.GetProjectRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Project: r"""Returns the specified Project resource. Args: - request (google.cloud.compute_v1.types.GetProjectRequest): + request (Union[google.cloud.compute_v1.types.GetProjectRequest, dict]): The request object. A request message for Projects.Get. See the method description for details. project (str): @@ -729,10 +741,10 @@ def get( def get_xpn_host( self, - request: compute.GetXpnHostProjectRequest = None, + request: Union[compute.GetXpnHostProjectRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Project: @@ -740,7 +752,7 @@ def get_xpn_host( links to. May be empty if no link exists. Args: - request (google.cloud.compute_v1.types.GetXpnHostProjectRequest): + request (Union[google.cloud.compute_v1.types.GetXpnHostProjectRequest, dict]): The request object. A request message for Projects.GetXpnHost. See the method description for details. 
@@ -797,10 +809,10 @@ def get_xpn_host( def get_xpn_resources( self, - request: compute.GetXpnResourcesProjectsRequest = None, + request: Union[compute.GetXpnResourcesProjectsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.GetXpnResourcesPager: @@ -808,7 +820,7 @@ def get_xpn_resources( associated with this host project. Args: - request (google.cloud.compute_v1.types.GetXpnResourcesProjectsRequest): + request (Union[google.cloud.compute_v1.types.GetXpnResourcesProjectsRequest, dict]): The request object. A request message for Projects.GetXpnResources. See the method description for details. @@ -869,11 +881,11 @@ def get_xpn_resources( def list_xpn_hosts( self, - request: compute.ListXpnHostsProjectsRequest = None, + request: Union[compute.ListXpnHostsProjectsRequest, dict] = None, *, project: str = None, projects_list_xpn_hosts_request_resource: compute.ProjectsListXpnHostsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListXpnHostsPager: @@ -881,7 +893,7 @@ def list_xpn_hosts( user in an organization. Args: - request (google.cloud.compute_v1.types.ListXpnHostsProjectsRequest): + request (Union[google.cloud.compute_v1.types.ListXpnHostsProjectsRequest, dict]): The request object. A request message for Projects.ListXpnHosts. See the method description for details. @@ -951,18 +963,18 @@ def list_xpn_hosts( def move_disk( self, - request: compute.MoveDiskProjectRequest = None, + request: Union[compute.MoveDiskProjectRequest, dict] = None, *, project: str = None, disk_move_request_resource: compute.DiskMoveRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Moves a persistent disk from one zone to another. Args: - request (google.cloud.compute_v1.types.MoveDiskProjectRequest): + request (Union[google.cloud.compute_v1.types.MoveDiskProjectRequest, dict]): The request object. A request message for Projects.MoveDisk. See the method description for details. @@ -1036,11 +1048,11 @@ def move_disk( def move_instance( self, - request: compute.MoveInstanceProjectRequest = None, + request: Union[compute.MoveInstanceProjectRequest, dict] = None, *, project: str = None, instance_move_request_resource: compute.InstanceMoveRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1048,7 +1060,7 @@ def move_instance( from one zone to another. Args: - request (google.cloud.compute_v1.types.MoveInstanceProjectRequest): + request (Union[google.cloud.compute_v1.types.MoveInstanceProjectRequest, dict]): The request object. A request message for Projects.MoveInstance. See the method description for details. 
@@ -1122,11 +1134,11 @@ def move_instance( def set_common_instance_metadata( self, - request: compute.SetCommonInstanceMetadataProjectRequest = None, + request: Union[compute.SetCommonInstanceMetadataProjectRequest, dict] = None, *, project: str = None, metadata_resource: compute.Metadata = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1135,7 +1147,7 @@ def set_common_instance_metadata( request. Args: - request (google.cloud.compute_v1.types.SetCommonInstanceMetadataProjectRequest): + request (Union[google.cloud.compute_v1.types.SetCommonInstanceMetadataProjectRequest, dict]): The request object. A request message for Projects.SetCommonInstanceMetadata. See the method description for details. @@ -1211,11 +1223,11 @@ def set_common_instance_metadata( def set_default_network_tier( self, - request: compute.SetDefaultNetworkTierProjectRequest = None, + request: Union[compute.SetDefaultNetworkTierProjectRequest, dict] = None, *, project: str = None, projects_set_default_network_tier_request_resource: compute.ProjectsSetDefaultNetworkTierRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1225,7 +1237,7 @@ def set_default_network_tier( specifying the network tier field. Args: - request (google.cloud.compute_v1.types.SetDefaultNetworkTierProjectRequest): + request (Union[google.cloud.compute_v1.types.SetDefaultNetworkTierProjectRequest, dict]): The request object. A request message for Projects.SetDefaultNetworkTier. See the method description for details. @@ -1303,11 +1315,11 @@ def set_default_network_tier( def set_usage_export_bucket( self, - request: compute.SetUsageExportBucketProjectRequest = None, + request: Union[compute.SetUsageExportBucketProjectRequest, dict] = None, *, project: str = None, usage_export_location_resource: compute.UsageExportLocation = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1317,7 +1329,7 @@ def set_usage_export_bucket( export feature will be disabled. Args: - request (google.cloud.compute_v1.types.SetUsageExportBucketProjectRequest): + request (Union[google.cloud.compute_v1.types.SetUsageExportBucketProjectRequest, dict]): The request object. A request message for Projects.SetUsageExportBucket. See the method description for details. @@ -1389,6 +1401,19 @@ def set_usage_export_bucket( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/projects/pagers.py b/google/cloud/compute_v1/services/projects/pagers.py index e0d6689ef..04dc288dd 100644 --- a/google/cloud/compute_v1/services/projects/pagers.py +++ b/google/cloud/compute_v1/services/projects/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ProjectsGetXpnResources]: + def pages(self) -> Iterator[compute.ProjectsGetXpnResources]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.XpnResourceId]: + def __iter__(self) -> Iterator[compute.XpnResourceId]: for page in self.pages: yield from page.resources @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.XpnHostList]: + def pages(self) -> Iterator[compute.XpnHostList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Project]: + def __iter__(self) -> Iterator[compute.Project]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/projects/transports/base.py b/google/cloud/compute_v1/services/projects/transports/base.py index e3960b084..9f0ef02de 100644 --- a/google/cloud/compute_v1/services/projects/transports/base.py +++ b/google/cloud/compute_v1/services/projects/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ProjectsTransport(abc.ABC): """Abstract transport class for Projects.""" @@ 
-99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -207,6 +171,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def disable_xpn_host( self, diff --git a/google/cloud/compute_v1/services/projects/transports/rest.py b/google/cloud/compute_v1/services/projects/transports/rest.py index 641e2b1b0..6a20677ea 100644 --- a/google/cloud/compute_v1/services/projects/transports/rest.py +++ b/google/cloud/compute_v1/services/projects/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ProjectsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import ProjectsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ProjectsRestTransport(ProjectsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def disable_xpn_host( + def _disable_xpn_host( self, request: compute.DisableXpnHostProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the disable xpn host method over HTTP. @@ -112,6 +136,9 @@ def disable_xpn_host( Projects.DisableXpnHost. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,22 +162,51 @@ def disable_xpn_host( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/disableXpnHost".format( - host=self._host, project=request.project, + http_options = [ + {"method": "post", "uri": "/compute/v1/projects/{project}/disableXpnHost",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.DisableXpnHostProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DisableXpnHostProjectRequest.to_json( + compute.DisableXpnHostProjectRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DisableXpnHostProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +216,12 @@ def disable_xpn_host( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def disable_xpn_resource( + def _disable_xpn_resource( self, request: compute.DisableXpnResourceProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the disable xpn resource method over HTTP. @@ -174,6 +232,9 @@ def disable_xpn_resource( Projects.DisableXpnResource. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
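The rewritten handler above shares one non-obvious step with every other method in this patch: after the request is transcoded and its query-param portion is round-tripped through proto JSON, required fields that hold their default value are silently dropped, so they are copied back from the transcoded request. A minimal standalone sketch of that back-fill, with plain dicts standing in for the proto messages and for path_template.transcode, and an invented field value for illustration:

required_fields = [
    # (snake_case_name, camel_case_name)
    ("project", "project"),
]

# Query-param portion produced by transcoding (value invented for illustration).
orig_query_params = {"project": ""}

# What proto-JSON serialization kept: default-valued fields were omitted.
query_params = {}

# Restore any required field that serialization dropped.
for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"project": ""}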
@@ -197,30 +258,61 @@ def disable_xpn_resource( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/disableXpnResource", + "body": "projects_disable_xpn_resource_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.DisableXpnResourceProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ProjectsDisableXpnResourceRequest.to_json( - request.projects_disable_xpn_resource_request_resource, + compute.ProjectsDisableXpnResourceRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/disableXpnResource".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DisableXpnResourceProjectRequest.to_json( + compute.DisableXpnResourceProjectRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DisableXpnResourceProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -231,10 +323,12 @@ def disable_xpn_resource( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def enable_xpn_host( + def _enable_xpn_host( self, request: compute.EnableXpnHostProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the enable xpn host method over HTTP. @@ -245,6 +339,9 @@ def enable_xpn_host( Projects.EnableXpnHost. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
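Another recurring change is that the hard-coded self._session.post(...) / .get(...) calls are replaced by getattr(self._session, method)(...), so the HTTP verb comes from whichever http_options entry transcoding matched. A rough sketch of that dispatch using a plain requests.Session; the host and path below are placeholders, not a real call:

import requests

def send(session: requests.Session, method: str, host: str, uri: str, **kwargs):
    # Resolve session.get / session.post / session.delete ... from the verb
    # chosen by transcoding, as the rewritten handlers above do.
    return getattr(session, method)("https://{host}{uri}".format(host=host, uri=uri), **kwargs)

# Example (placeholder host and path; not executed here):
# send(requests.Session(), "post", "compute.googleapis.com",
#      "/compute/v1/projects/my-project/disableXpnHost", timeout=30)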
@@ -268,22 +365,49 @@ def enable_xpn_host( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/enableXpnHost".format( - host=self._host, project=request.project, + http_options = [ + {"method": "post", "uri": "/compute/v1/projects/{project}/enableXpnHost",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.EnableXpnHostProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.EnableXpnHostProjectRequest.to_json( + compute.EnableXpnHostProjectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.EnableXpnHostProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -293,10 +417,12 @@ def enable_xpn_host( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def enable_xpn_resource( + def _enable_xpn_resource( self, request: compute.EnableXpnResourceProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the enable xpn resource method over HTTP. @@ -307,6 +433,9 @@ def enable_xpn_resource( Projects.EnableXpnResource. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -330,30 +459,61 @@ def enable_xpn_resource( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/enableXpnResource", + "body": "projects_enable_xpn_resource_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.EnableXpnResourceProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ProjectsEnableXpnResourceRequest.to_json( - request.projects_enable_xpn_resource_request_resource, + compute.ProjectsEnableXpnResourceRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/enableXpnResource".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.EnableXpnResourceProjectRequest.to_json( + compute.EnableXpnResourceProjectRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.EnableXpnResourceProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -364,10 +524,12 @@ def enable_xpn_resource( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Project: r"""Call the get method over HTTP. @@ -377,6 +539,9 @@ def get( The request object. A request message for Projects.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -390,20 +555,49 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.GetProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetProjectRequest.to_json( + compute.GetProjectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -413,10 +607,12 @@ def get( # Return the response return compute.Project.from_json(response.content, ignore_unknown_fields=True) - def get_xpn_host( + def _get_xpn_host( self, request: compute.GetXpnHostProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Project: r"""Call the get xpn host method over HTTP. @@ -427,6 +623,9 @@ def get_xpn_host( Projects.GetXpnHost. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -440,20 +639,49 @@ def get_xpn_host( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/getXpnHost".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/getXpnHost",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.GetXpnHostProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetXpnHostProjectRequest.to_json( + compute.GetXpnHostProjectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -463,10 +691,12 @@ def get_xpn_host( # Return the response return compute.Project.from_json(response.content, ignore_unknown_fields=True) - def get_xpn_resources( + def _get_xpn_resources( self, request: compute.GetXpnResourcesProjectsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ProjectsGetXpnResources: r"""Call the get xpn resources method over HTTP. @@ -477,6 +707,9 @@ def get_xpn_resources( Projects.GetXpnResources. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -485,30 +718,51 @@ def get_xpn_resources( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/getXpnResources".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetXpnResourcesProjectsRequest.filter in request: - query_params["filter"] = request.filter - if compute.GetXpnResourcesProjectsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.GetXpnResourcesProjectsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.GetXpnResourcesProjectsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.GetXpnResourcesProjectsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/getXpnResources",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.GetXpnResourcesProjectsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetXpnResourcesProjectsRequest.to_json( + compute.GetXpnResourcesProjectsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -520,10 +774,12 @@ def get_xpn_resources( response.content, ignore_unknown_fields=True ) - def list_xpn_hosts( + def _list_xpn_hosts( self, request: compute.ListXpnHostsProjectsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.XpnHostList: r"""Call the list xpn hosts method over HTTP. @@ -534,6 +790,9 @@ def list_xpn_hosts( Projects.ListXpnHosts. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
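On the client side, list-style calls such as get_xpn_resources are wrapped in the pagers whose annotations were switched from Iterable to Iterator earlier in this patch; iterating the pager follows next_page_token transparently. A hedged usage sketch, assuming Application Default Credentials and the flattened project argument generated for this method ("my-project" is a placeholder):

from google.cloud import compute_v1

client = compute_v1.ProjectsClient()

# Iterating the pager follows next_page_token lazily, one HTTP call per page.
for xpn_resource in client.get_xpn_resources(project="my-project"):
    print(xpn_resource)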
@@ -542,38 +801,59 @@ def list_xpn_hosts( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/listXpnHosts", + "body": "projects_list_xpn_hosts_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListXpnHostsProjectsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ProjectsListXpnHostsRequest.to_json( - request.projects_list_xpn_hosts_request_resource, + compute.ProjectsListXpnHostsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/listXpnHosts".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListXpnHostsProjectsRequest.to_json( + compute.ListXpnHostsProjectsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListXpnHostsProjectsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListXpnHostsProjectsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListXpnHostsProjectsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListXpnHostsProjectsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListXpnHostsProjectsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -586,10 +866,12 @@ def list_xpn_hosts( response.content, ignore_unknown_fields=True ) - def move_disk( + def _move_disk( self, request: compute.MoveDiskProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the move disk method over HTTP. @@ -600,6 +882,9 @@ def move_disk( Projects.MoveDisk. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -623,30 +908,59 @@ def move_disk( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/moveDisk", + "body": "disk_move_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.MoveDiskProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.DiskMoveRequest.to_json( - request.disk_move_request_resource, + compute.DiskMoveRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/moveDisk".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.MoveDiskProjectRequest.to_json( + compute.MoveDiskProjectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.MoveDiskProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -657,10 +971,12 @@ def move_disk( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def move_instance( + def _move_instance( self, request: compute.MoveInstanceProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the move instance method over HTTP. @@ -671,6 +987,9 @@ def move_instance( Projects.MoveInstance. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -694,30 +1013,59 @@ def move_instance( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/moveInstance", + "body": "instance_move_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.MoveInstanceProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceMoveRequest.to_json( - request.instance_move_request_resource, + compute.InstanceMoveRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/moveInstance".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.MoveInstanceProjectRequest.to_json( + compute.MoveInstanceProjectRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.MoveInstanceProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -728,10 +1076,12 @@ def move_instance( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_common_instance_metadata( + def _set_common_instance_metadata( self, request: compute.SetCommonInstanceMetadataProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set common instance @@ -743,6 +1093,9 @@ def set_common_instance_metadata( Projects.SetCommonInstanceMetadata. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -766,30 +1119,63 @@ def set_common_instance_metadata( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/setCommonInstanceMetadata", + "body": "metadata_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.SetCommonInstanceMetadataProjectRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Metadata.to_json( - request.metadata_resource, + compute.Metadata(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/setCommonInstanceMetadata".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetCommonInstanceMetadataProjectRequest.to_json( + compute.SetCommonInstanceMetadataProjectRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetCommonInstanceMetadataProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -800,10 +1186,12 @@ def set_common_instance_metadata( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_default_network_tier( + def _set_default_network_tier( self, request: compute.SetDefaultNetworkTierProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set default network tier method over HTTP. @@ -814,6 +1202,9 @@ def set_default_network_tier( Projects.SetDefaultNetworkTier. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -837,30 +1228,61 @@ def set_default_network_tier( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/setDefaultNetworkTier", + "body": "projects_set_default_network_tier_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.SetDefaultNetworkTierProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ProjectsSetDefaultNetworkTierRequest.to_json( - request.projects_set_default_network_tier_request_resource, + compute.ProjectsSetDefaultNetworkTierRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/setDefaultNetworkTier".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetDefaultNetworkTierProjectRequest.to_json( + compute.SetDefaultNetworkTierProjectRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetDefaultNetworkTierProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -871,10 +1293,12 @@ def set_default_network_tier( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_usage_export_bucket( + def _set_usage_export_bucket( self, request: compute.SetUsageExportBucketProjectRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set usage export bucket method over HTTP. @@ -885,6 +1309,9 @@ def set_usage_export_bucket( Projects.SetUsageExportBucket. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -908,30 +1335,61 @@ def set_usage_export_bucket( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/setUsageExportBucket", + "body": "usage_export_location_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.SetUsageExportBucketProjectRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UsageExportLocation.to_json( - request.usage_export_location_resource, + compute.UsageExportLocation(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/setUsageExportBucket".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetUsageExportBucketProjectRequest.to_json( + compute.SetUsageExportBucketProjectRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetUsageExportBucketProjectRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -942,5 +1400,86 @@ def set_usage_export_bucket( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def disable_xpn_host( + self, + ) -> Callable[[compute.DisableXpnHostProjectRequest], compute.Operation]: + return self._disable_xpn_host + + @property + def disable_xpn_resource( + self, + ) -> Callable[[compute.DisableXpnResourceProjectRequest], compute.Operation]: + return self._disable_xpn_resource + + @property + def enable_xpn_host( + self, + ) -> Callable[[compute.EnableXpnHostProjectRequest], compute.Operation]: + return self._enable_xpn_host + + @property + def enable_xpn_resource( + self, + ) -> Callable[[compute.EnableXpnResourceProjectRequest], compute.Operation]: + return self._enable_xpn_resource + + @property + def get(self) -> Callable[[compute.GetProjectRequest], compute.Project]: + return self._get + + @property + def get_xpn_host( + self, + ) -> Callable[[compute.GetXpnHostProjectRequest], compute.Project]: + 
return self._get_xpn_host + + @property + def get_xpn_resources( + self, + ) -> Callable[ + [compute.GetXpnResourcesProjectsRequest], compute.ProjectsGetXpnResources + ]: + return self._get_xpn_resources + + @property + def list_xpn_hosts( + self, + ) -> Callable[[compute.ListXpnHostsProjectsRequest], compute.XpnHostList]: + return self._list_xpn_hosts + + @property + def move_disk( + self, + ) -> Callable[[compute.MoveDiskProjectRequest], compute.Operation]: + return self._move_disk + + @property + def move_instance( + self, + ) -> Callable[[compute.MoveInstanceProjectRequest], compute.Operation]: + return self._move_instance + + @property + def set_common_instance_metadata( + self, + ) -> Callable[[compute.SetCommonInstanceMetadataProjectRequest], compute.Operation]: + return self._set_common_instance_metadata + + @property + def set_default_network_tier( + self, + ) -> Callable[[compute.SetDefaultNetworkTierProjectRequest], compute.Operation]: + return self._set_default_network_tier + + @property + def set_usage_export_bucket( + self, + ) -> Callable[[compute.SetUsageExportBucketProjectRequest], compute.Operation]: + return self._set_usage_export_bucket + + def close(self): + self._session.close() + __all__ = ("ProjectsRestTransport",) diff --git a/google/cloud/compute_v1/services/public_advertised_prefixes/client.py b/google/cloud/compute_v1/services/public_advertised_prefixes/client.py index 8659f3009..7064f947c 100644 --- a/google/cloud/compute_v1/services/public_advertised_prefixes/client.py +++ b/google/cloud/compute_v1/services/public_advertised_prefixes/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.public_advertised_prefixes import pagers from google.cloud.compute_v1.types import compute from .transports.base import PublicAdvertisedPrefixesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,22 +339,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeletePublicAdvertisedPrefixeRequest = None, + request: Union[compute.DeletePublicAdvertisedPrefixeRequest, dict] = None, *, project: str = None, public_advertised_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified PublicAdvertisedPrefix Args: - request (google.cloud.compute_v1.types.DeletePublicAdvertisedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.DeletePublicAdvertisedPrefixeRequest, dict]): The request object. A request message for PublicAdvertisedPrefixes.Delete. See the method description for details. @@ -419,11 +431,11 @@ def delete( def get( self, - request: compute.GetPublicAdvertisedPrefixeRequest = None, + request: Union[compute.GetPublicAdvertisedPrefixeRequest, dict] = None, *, project: str = None, public_advertised_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicAdvertisedPrefix: @@ -431,7 +443,7 @@ def get( resource. Args: - request (google.cloud.compute_v1.types.GetPublicAdvertisedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.GetPublicAdvertisedPrefixeRequest, dict]): The request object. A request message for PublicAdvertisedPrefixes.Get. See the method description for details. @@ -498,11 +510,11 @@ def get( def insert( self, - request: compute.InsertPublicAdvertisedPrefixeRequest = None, + request: Union[compute.InsertPublicAdvertisedPrefixeRequest, dict] = None, *, project: str = None, public_advertised_prefix_resource: compute.PublicAdvertisedPrefix = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -511,7 +523,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertPublicAdvertisedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.InsertPublicAdvertisedPrefixeRequest, dict]): The request object. A request message for PublicAdvertisedPrefixes.Insert. See the method description for details. @@ -587,17 +599,17 @@ def insert( def list( self, - request: compute.ListPublicAdvertisedPrefixesRequest = None, + request: Union[compute.ListPublicAdvertisedPrefixesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: r"""Lists the PublicAdvertisedPrefixes for a project. 
Args: - request (google.cloud.compute_v1.types.ListPublicAdvertisedPrefixesRequest): + request (Union[google.cloud.compute_v1.types.ListPublicAdvertisedPrefixesRequest, dict]): The request object. A request message for PublicAdvertisedPrefixes.List. See the method description for details. @@ -658,12 +670,12 @@ def list( def patch( self, - request: compute.PatchPublicAdvertisedPrefixeRequest = None, + request: Union[compute.PatchPublicAdvertisedPrefixeRequest, dict] = None, *, project: str = None, public_advertised_prefix: str = None, public_advertised_prefix_resource: compute.PublicAdvertisedPrefix = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -673,7 +685,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchPublicAdvertisedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.PatchPublicAdvertisedPrefixeRequest, dict]): The request object. A request message for PublicAdvertisedPrefixes.Patch. See the method description for details. @@ -758,6 +770,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/public_advertised_prefixes/pagers.py b/google/cloud/compute_v1/services/public_advertised_prefixes/pagers.py index 65f3e02d8..1a8da10e5 100644 --- a/google/cloud/compute_v1/services/public_advertised_prefixes/pagers.py +++ b/google/cloud/compute_v1/services/public_advertised_prefixes/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.PublicAdvertisedPrefixList]: + def pages(self) -> Iterator[compute.PublicAdvertisedPrefixList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.PublicAdvertisedPrefix]: + def __iter__(self) -> Iterator[compute.PublicAdvertisedPrefix]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py b/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py index 69ff5553d..635bd8df0 100644 --- a/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py +++ b/google/cloud/compute_v1/services/public_advertised_prefixes/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from 
google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class PublicAdvertisedPrefixesTransport(abc.ABC): """Abstract transport class for PublicAdvertisedPrefixes.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py b/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py index edf3c3a09..6b37ad63a 100644 --- a/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py +++ b/google/cloud/compute_v1/services/public_advertised_prefixes/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + PublicAdvertisedPrefixesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import PublicAdvertisedPrefixesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class PublicAdvertisedPrefixesRestTransport(PublicAdvertisedPrefixesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeletePublicAdvertisedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( PublicAdvertisedPrefixes.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,24 +165,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}".format( - host=self._host, - project=request.project, - public_advertised_prefix=request.public_advertised_prefix, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_advertised_prefix", "publicAdvertisedPrefix"), + ] + + request_kwargs = compute.DeletePublicAdvertisedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeletePublicAdvertisedPrefixeRequest.to_json( + compute.DeletePublicAdvertisedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeletePublicAdvertisedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -162,10 +223,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetPublicAdvertisedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicAdvertisedPrefix: r"""Call the get method over HTTP. @@ -176,6 +239,9 @@ def get( PublicAdvertisedPrefixes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -190,22 +256,55 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}".format( - host=self._host, - project=request.project, - public_advertised_prefix=request.public_advertised_prefix, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_advertised_prefix", "publicAdvertisedPrefix"), + ] + + request_kwargs = compute.GetPublicAdvertisedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetPublicAdvertisedPrefixeRequest.to_json( + compute.GetPublicAdvertisedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -217,10 +316,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertPublicAdvertisedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -231,6 +332,9 @@ def insert( PublicAdvertisedPrefixes.Insert. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -254,30 +358,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/publicAdvertisedPrefixes", + "body": "public_advertised_prefix_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertPublicAdvertisedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PublicAdvertisedPrefix.to_json( - request.public_advertised_prefix_resource, + compute.PublicAdvertisedPrefix(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicAdvertisedPrefixes".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertPublicAdvertisedPrefixeRequest.to_json( + compute.InsertPublicAdvertisedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertPublicAdvertisedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -288,10 +423,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListPublicAdvertisedPrefixesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicAdvertisedPrefixList: r"""Call the list method over HTTP. @@ -302,6 +439,9 @@ def list( PublicAdvertisedPrefixes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -310,33 +450,54 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicAdvertisedPrefixes".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/publicAdvertisedPrefixes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListPublicAdvertisedPrefixesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListPublicAdvertisedPrefixesRequest.to_json( + compute.ListPublicAdvertisedPrefixesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListPublicAdvertisedPrefixesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListPublicAdvertisedPrefixesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListPublicAdvertisedPrefixesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListPublicAdvertisedPrefixesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListPublicAdvertisedPrefixesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -348,10 +509,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchPublicAdvertisedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -362,6 +525,9 @@ def patch( PublicAdvertisedPrefixes.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -385,32 +551,62 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}", + "body": "public_advertised_prefix_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_advertised_prefix", "publicAdvertisedPrefix"), + ] + + request_kwargs = compute.PatchPublicAdvertisedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PublicAdvertisedPrefix.to_json( - request.public_advertised_prefix_resource, + compute.PublicAdvertisedPrefix(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}".format( - host=self._host, - project=request.project, - public_advertised_prefix=request.public_advertised_prefix, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchPublicAdvertisedPrefixeRequest.to_json( + compute.PatchPublicAdvertisedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchPublicAdvertisedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -421,5 +617,43 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeletePublicAdvertisedPrefixeRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[ + [compute.GetPublicAdvertisedPrefixeRequest], compute.PublicAdvertisedPrefix + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertPublicAdvertisedPrefixeRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListPublicAdvertisedPrefixesRequest], + compute.PublicAdvertisedPrefixList, + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchPublicAdvertisedPrefixeRequest], compute.Operation]: + return self._patch + + def close(self): + self._session.close() + __all__ = ("PublicAdvertisedPrefixesRestTransport",) diff --git a/google/cloud/compute_v1/services/public_delegated_prefixes/client.py b/google/cloud/compute_v1/services/public_delegated_prefixes/client.py index 4f3aed629..f8c019bac 100644 --- a/google/cloud/compute_v1/services/public_delegated_prefixes/client.py +++ b/google/cloud/compute_v1/services/public_delegated_prefixes/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.public_delegated_prefixes import pagers from google.cloud.compute_v1.types import compute from .transports.base import PublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,14 +339,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListPublicDelegatedPrefixesRequest = None, + request: Union[ + compute.AggregatedListPublicDelegatedPrefixesRequest, dict + ] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -343,7 +357,7 @@ def aggregated_list( the specific project across all scopes. Args: - request (google.cloud.compute_v1.types.AggregatedListPublicDelegatedPrefixesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListPublicDelegatedPrefixesRequest, dict]): The request object. A request message for PublicDelegatedPrefixes.AggregatedList. See the method description for details. @@ -408,12 +422,12 @@ def aggregated_list( def delete( self, - request: compute.DeletePublicDelegatedPrefixeRequest = None, + request: Union[compute.DeletePublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, region: str = None, public_delegated_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -421,7 +435,7 @@ def delete( given region. 
Args: - request (google.cloud.compute_v1.types.DeletePublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.DeletePublicDelegatedPrefixeRequest, dict]): The request object. A request message for PublicDelegatedPrefixes.Delete. See the method description for details. @@ -504,12 +518,12 @@ def delete( def get( self, - request: compute.GetPublicDelegatedPrefixeRequest = None, + request: Union[compute.GetPublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, region: str = None, public_delegated_prefix: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicDelegatedPrefix: @@ -517,7 +531,7 @@ def get( in the given region. Args: - request (google.cloud.compute_v1.types.GetPublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.GetPublicDelegatedPrefixeRequest, dict]): The request object. A request message for PublicDelegatedPrefixes.Get. See the method description for details. @@ -594,12 +608,12 @@ def get( def insert( self, - request: compute.InsertPublicDelegatedPrefixeRequest = None, + request: Union[compute.InsertPublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, region: str = None, public_delegated_prefix_resource: compute.PublicDelegatedPrefix = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -608,7 +622,7 @@ def insert( are included in the request. Args: - request (google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest, dict]): The request object. A request message for PublicDelegatedPrefixes.Insert. See the method description for details. @@ -691,11 +705,11 @@ def insert( def list( self, - request: compute.ListPublicDelegatedPrefixesRequest = None, + request: Union[compute.ListPublicDelegatedPrefixesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -703,7 +717,7 @@ def list( the given region. Args: - request (google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest): + request (Union[google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest, dict]): The request object. A request message for PublicDelegatedPrefixes.List. See the method description for details. @@ -771,13 +785,13 @@ def list( def patch( self, - request: compute.PatchPublicDelegatedPrefixeRequest = None, + request: Union[compute.PatchPublicDelegatedPrefixeRequest, dict] = None, *, project: str = None, region: str = None, public_delegated_prefix: str = None, public_delegated_prefix_resource: compute.PublicDelegatedPrefix = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -787,7 +801,7 @@ def patch( format and processing rules. Args: - request (google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest): + request (Union[google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest, dict]): The request object. A request message for PublicDelegatedPrefixes.Patch. 
See the method description for details. @@ -879,6 +893,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/public_delegated_prefixes/pagers.py b/google/cloud/compute_v1/services/public_delegated_prefixes/pagers.py index c84a6c416..da0ee818f 100644 --- a/google/cloud/compute_v1/services/public_delegated_prefixes/pagers.py +++ b/google/cloud/compute_v1/services/public_delegated_prefixes/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,7 +74,7 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.PublicDelegatedPrefixAggregatedList]: + def pages(self) -> Iterator[compute.PublicDelegatedPrefixAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token @@ -83,7 +83,7 @@ def pages(self) -> Iterable[compute.PublicDelegatedPrefixAggregatedList]: def __iter__( self, - ) -> Iterable[Tuple[str, compute.PublicDelegatedPrefixesScopedList]]: + ) -> Iterator[Tuple[str, compute.PublicDelegatedPrefixesScopedList]]: for page in self.pages: yield from page.items.items() @@ -141,14 +141,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.PublicDelegatedPrefixList]: + def pages(self) -> Iterator[compute.PublicDelegatedPrefixList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.PublicDelegatedPrefix]: + def __iter__(self) -> Iterator[compute.PublicDelegatedPrefix]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py b/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py index df0d122fc..f7bea20c5 100644 --- a/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py +++ b/google/cloud/compute_v1/services/public_delegated_prefixes/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: 
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class PublicDelegatedPrefixesTransport(abc.ABC): """Abstract transport class for PublicDelegatedPrefixes.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -178,6 +142,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py b/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py index bf91e9b0c..1c5a000b1 100644 --- a/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py +++ b/google/cloud/compute_v1/services/public_delegated_prefixes/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + PublicDelegatedPrefixesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import PublicDelegatedPrefixesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class PublicDelegatedPrefixesRestTransport(PublicDelegatedPrefixesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListPublicDelegatedPrefixesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicDelegatedPrefixAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( PublicDelegatedPrefixes.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,38 +150,56 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/publicDelegatedPrefixes".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/publicDelegatedPrefixes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListPublicDelegatedPrefixesRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListPublicDelegatedPrefixesRequest.to_json( + compute.AggregatedListPublicDelegatedPrefixesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListPublicDelegatedPrefixesRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.AggregatedListPublicDelegatedPrefixesRequest.include_all_scopes - in request - ): - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListPublicDelegatedPrefixesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListPublicDelegatedPrefixesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListPublicDelegatedPrefixesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListPublicDelegatedPrefixesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +211,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeletePublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -177,6 +227,9 @@ def delete( PublicDelegatedPrefixes.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -200,25 +253,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}".format( - host=self._host, - project=request.project, - region=request.region, - public_delegated_prefix=request.public_delegated_prefix, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_delegated_prefix", "publicDelegatedPrefix"), + ("region", "region"), + ] + + request_kwargs = compute.DeletePublicDelegatedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeletePublicDelegatedPrefixeRequest.to_json( + compute.DeletePublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeletePublicDelegatedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -228,10 +312,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicDelegatedPrefix: r"""Call the get method over HTTP. @@ -242,6 +328,9 @@ def get( PublicDelegatedPrefixes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -259,23 +348,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}".format( - host=self._host, - project=request.project, - region=request.region, - public_delegated_prefix=request.public_delegated_prefix, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_delegated_prefix", "publicDelegatedPrefix"), + ("region", "region"), + ] + + request_kwargs = compute.GetPublicDelegatedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetPublicDelegatedPrefixeRequest.to_json( + compute.GetPublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -287,10 +409,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -301,6 +425,9 @@ def insert( PublicDelegatedPrefixes.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -324,30 +451,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes", + "body": "public_delegated_prefix_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertPublicDelegatedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PublicDelegatedPrefix.to_json( - request.public_delegated_prefix_resource, + compute.PublicDelegatedPrefix(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertPublicDelegatedPrefixeRequest.to_json( + compute.InsertPublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertPublicDelegatedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -358,10 +517,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListPublicDelegatedPrefixesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.PublicDelegatedPrefixList: r"""Call the list method over HTTP. @@ -372,6 +533,9 @@ def list( PublicDelegatedPrefixes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -380,30 +544,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListPublicDelegatedPrefixesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListPublicDelegatedPrefixesRequest.to_json( + compute.ListPublicDelegatedPrefixesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListPublicDelegatedPrefixesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListPublicDelegatedPrefixesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListPublicDelegatedPrefixesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListPublicDelegatedPrefixesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListPublicDelegatedPrefixesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -415,10 +604,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchPublicDelegatedPrefixeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -429,6 +620,9 @@ def patch( PublicDelegatedPrefixes.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -452,33 +646,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}", + "body": "public_delegated_prefix_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("public_delegated_prefix", "publicDelegatedPrefix"), + ("region", "region"), + ] + + request_kwargs = compute.PatchPublicDelegatedPrefixeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.PublicDelegatedPrefix.to_json( - request.public_delegated_prefix_resource, + compute.PublicDelegatedPrefix(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}".format( - host=self._host, - project=request.project, - region=request.region, - public_delegated_prefix=request.public_delegated_prefix, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchPublicDelegatedPrefixeRequest.to_json( + compute.PatchPublicDelegatedPrefixeRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchPublicDelegatedPrefixeRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -489,5 +713,51 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListPublicDelegatedPrefixesRequest], + compute.PublicDelegatedPrefixAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeletePublicDelegatedPrefixeRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[ + [compute.GetPublicDelegatedPrefixeRequest], compute.PublicDelegatedPrefix + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertPublicDelegatedPrefixeRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListPublicDelegatedPrefixesRequest], compute.PublicDelegatedPrefixList + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchPublicDelegatedPrefixeRequest], compute.Operation]: + return self._patch + + def close(self): + self._session.close() + __all__ = ("PublicDelegatedPrefixesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_autoscalers/client.py b/google/cloud/compute_v1/services/region_autoscalers/client.py index 9749a8484..baa0afba0 100644 --- a/google/cloud/compute_v1/services/region_autoscalers/client.py +++ b/google/cloud/compute_v1/services/region_autoscalers/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_autoscalers import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionAutoscalersTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,23 +339,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionAutoscalerRequest = None, + request: Union[compute.DeleteRegionAutoscalerRequest, dict] = None, *, project: str = None, region: str = None, autoscaler: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified autoscaler. Args: - request (google.cloud.compute_v1.types.DeleteRegionAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionAutoscalerRequest, dict]): The request object. A request message for RegionAutoscalers.Delete. See the method description for details. @@ -427,19 +439,19 @@ def delete( def get( self, - request: compute.GetRegionAutoscalerRequest = None, + request: Union[compute.GetRegionAutoscalerRequest, dict] = None, *, project: str = None, region: str = None, autoscaler: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Autoscaler: r"""Returns the specified autoscaler. 
Args: - request (google.cloud.compute_v1.types.GetRegionAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.GetRegionAutoscalerRequest, dict]): The request object. A request message for RegionAutoscalers.Get. See the method description for details. @@ -518,12 +530,12 @@ def get( def insert( self, - request: compute.InsertRegionAutoscalerRequest = None, + request: Union[compute.InsertRegionAutoscalerRequest, dict] = None, *, project: str = None, region: str = None, autoscaler_resource: compute.Autoscaler = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -531,7 +543,7 @@ def insert( the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertRegionAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionAutoscalerRequest, dict]): The request object. A request message for RegionAutoscalers.Insert. See the method description for details. @@ -614,11 +626,11 @@ def insert( def list( self, - request: compute.ListRegionAutoscalersRequest = None, + request: Union[compute.ListRegionAutoscalersRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -626,7 +638,7 @@ def list( specified region. Args: - request (google.cloud.compute_v1.types.ListRegionAutoscalersRequest): + request (Union[google.cloud.compute_v1.types.ListRegionAutoscalersRequest, dict]): The request object. A request message for RegionAutoscalers.List. See the method description for details. @@ -697,12 +709,12 @@ def list( def patch( self, - request: compute.PatchRegionAutoscalerRequest = None, + request: Union[compute.PatchRegionAutoscalerRequest, dict] = None, *, project: str = None, region: str = None, autoscaler_resource: compute.Autoscaler = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -712,7 +724,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchRegionAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.PatchRegionAutoscalerRequest, dict]): The request object. A request message for RegionAutoscalers.Patch. See the method description for details. @@ -795,12 +807,12 @@ def patch( def update( self, - request: compute.UpdateRegionAutoscalerRequest = None, + request: Union[compute.UpdateRegionAutoscalerRequest, dict] = None, *, project: str = None, region: str = None, autoscaler_resource: compute.Autoscaler = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -808,7 +820,7 @@ def update( the data included in the request. Args: - request (google.cloud.compute_v1.types.UpdateRegionAutoscalerRequest): + request (Union[google.cloud.compute_v1.types.UpdateRegionAutoscalerRequest, dict]): The request object. A request message for RegionAutoscalers.Update. See the method description for details. @@ -889,6 +901,19 @@ def update( # Done; return the response. 
return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_autoscalers/pagers.py b/google/cloud/compute_v1/services/region_autoscalers/pagers.py index 9eadae3fc..fe458439d 100644 --- a/google/cloud/compute_v1/services/region_autoscalers/pagers.py +++ b/google/cloud/compute_v1/services/region_autoscalers/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RegionAutoscalerList]: + def pages(self) -> Iterator[compute.RegionAutoscalerList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Autoscaler]: + def __iter__(self) -> Iterator[compute.Autoscaler]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_autoscalers/transports/base.py b/google/cloud/compute_v1/services/region_autoscalers/transports/base.py index b5ff9e4b5..8633a10b3 100644 --- a/google/cloud/compute_v1/services/region_autoscalers/transports/base.py +++ b/google/cloud/compute_v1/services/region_autoscalers/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionAutoscalersTransport(abc.ABC): """Abstract transport class for RegionAutoscalers.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = 
{"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -178,6 +142,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py b/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py index 1fd39a445..a0262bcb0 100644 --- a/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py +++ b/google/cloud/compute_v1/services/region_autoscalers/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionAutoscalersTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionAutoscalersTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionAutoscalersRestTransport(RegionAutoscalersTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionAutoscalers.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
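The rewritten REST transport above now accepts a url_scheme constructor argument. Below is a minimal sketch of constructing the transport against a local test server; the host, port, and anonymous credentials are hypothetical, and note that, per the inline comment repeated in each method body, the request URL is currently still hard-coded to "https://", so the parameter is accepted but not yet applied.

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.region_autoscalers.transports.rest import (
    RegionAutoscalersRestTransport,
)

# Hypothetical local endpoint; anonymous credentials keep the sketch self-contained.
transport = RegionAutoscalersRestTransport(
    host="localhost:8080",
    credentials=ga_credentials.AnonymousCredentials(),
    url_scheme="http",  # normally "https"
)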
@@ -135,25 +165,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}".format( - host=self._host, - project=request.project, - region=request.region, - autoscaler=request.autoscaler, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("autoscaler", "autoscaler"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionAutoscalerRequest.to_json( + compute.DeleteRegionAutoscalerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +224,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Autoscaler: r"""Call the get method over HTTP. @@ -177,6 +240,9 @@ def get( RegionAutoscalers.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
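The delete hunk above introduces the required-field backfill that recurs in every method: because to_json is called with including_default_value_fields=False, a required field sitting at its default value would be dropped from query_params, so it is copied back from the transcoded request. A self-contained sketch of that loop on plain dicts; the field pair here is illustrative, not taken from the request above.

required_fields = [("return_partial_success", "returnPartialSuccess")]  # illustrative pair
orig_query_params = {"return_partial_success": False}  # default value, dropped by to_json
query_params = {}  # what json.loads(Request.to_json(...)) produced

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params:
        if camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"returnPartialSuccess": False}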
@@ -197,23 +263,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}".format( - host=self._host, - project=request.project, - region=request.region, - autoscaler=request.autoscaler, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("autoscaler", "autoscaler"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionAutoscalerRequest.to_json( + compute.GetRegionAutoscalerRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +322,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -239,6 +338,9 @@ def insert( RegionAutoscalers.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
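The get hunk follows the same shape; the key new dependency is google.api_core.path_template.transcode, which matches the request fields against http_options and splits them into a URI, an HTTP method, and leftover query params. A rough sketch of that call with made-up values; the return shape is inferred from how the result is consumed above.

from google.api_core import path_template

http_options = [
    {
        "method": "get",
        "uri": "/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}",
    },
]
request_kwargs = {
    "project": "example-project",
    "region": "us-central1",
    "autoscaler": "example-autoscaler",
}
transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       -> "get"
# transcoded["uri"]          -> "/compute/v1/projects/example-project/regions/us-central1/autoscalers/example-autoscaler"
# transcoded["query_params"] -> fields that were not bound to the URI or body (empty here)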
@@ -262,30 +364,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/autoscalers", + "body": "autoscaler_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Autoscaler.to_json( - request.autoscaler_resource, + compute.Autoscaler(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/autoscalers".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionAutoscalerRequest.to_json( + compute.InsertRegionAutoscalerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -296,10 +430,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionAutoscalersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionAutoscalerList: r"""Call the list method over HTTP. @@ -310,6 +446,9 @@ def list( RegionAutoscalers.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -318,30 +457,55 @@ def list( Contains a list of autoscalers. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/autoscalers".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/autoscalers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionAutoscalersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionAutoscalersRequest.to_json( + compute.ListRegionAutoscalersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionAutoscalersRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionAutoscalersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionAutoscalersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionAutoscalersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionAutoscalersRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -353,10 +517,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchRegionAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -367,6 +533,9 @@ def patch( RegionAutoscalers.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -390,32 +559,62 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/autoscalers", + "body": "autoscaler_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchRegionAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Autoscaler.to_json( - request.autoscaler_resource, + compute.Autoscaler(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/autoscalers".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRegionAutoscalerRequest.to_json( + compute.PatchRegionAutoscalerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRegionAutoscalerRequest.autoscaler in request: - query_params["autoscaler"] = request.autoscaler - if compute.PatchRegionAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -426,10 +625,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateRegionAutoscalerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -440,6 +641,9 @@ def update( RegionAutoscalers.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
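One more recurring change worth spelling out: the HTTP verb is no longer hard-coded per method. getattr(self._session, method) looks up the matching requests.Session method from the transcoded request, so a single call site covers get, post, patch, put, and delete. Minimal sketch; no request is actually sent.

import requests

session = requests.Session()
method = "patch"  # e.g. transcoded_request["method"]
send = getattr(session, method)  # bound session.patch
# send(url, timeout=..., headers=..., params=..., data=...) then issues the request,
# exactly like the hand-written session.patch(...) call it replaces.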
@@ -463,32 +667,62 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/regions/{region}/autoscalers", + "body": "autoscaler_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.UpdateRegionAutoscalerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Autoscaler.to_json( - request.autoscaler_resource, + compute.Autoscaler(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/autoscalers".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateRegionAutoscalerRequest.to_json( + compute.UpdateRegionAutoscalerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateRegionAutoscalerRequest.autoscaler in request: - query_params["autoscaler"] = request.autoscaler - if compute.UpdateRegionAutoscalerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -499,5 +733,42 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionAutoscalerRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetRegionAutoscalerRequest], compute.Autoscaler]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionAutoscalerRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListRegionAutoscalersRequest], compute.RegionAutoscalerList]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchRegionAutoscalerRequest], compute.Operation]: + return self._patch + + @property + def update( + self, + ) -> Callable[[compute.UpdateRegionAutoscalerRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("RegionAutoscalersRestTransport",) diff --git a/google/cloud/compute_v1/services/region_backend_services/client.py b/google/cloud/compute_v1/services/region_backend_services/client.py index 7521b6ee4..0a3c2d46d 100644 --- a/google/cloud/compute_v1/services/region_backend_services/client.py +++ b/google/cloud/compute_v1/services/region_backend_services/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_backend_services import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionBackendServicesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,16 +339,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionBackendServiceRequest = None, + request: Union[compute.DeleteRegionBackendServiceRequest, dict] = None, *, project: str = None, region: str = None, backend_service: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -345,7 +357,7 @@ def delete( resource. Args: - request (google.cloud.compute_v1.types.DeleteRegionBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionBackendServiceRequest, dict]): The request object. A request message for RegionBackendServices.Delete. See the method description for details. @@ -430,12 +442,12 @@ def delete( def get( self, - request: compute.GetRegionBackendServiceRequest = None, + request: Union[compute.GetRegionBackendServiceRequest, dict] = None, *, project: str = None, region: str = None, backend_service: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendService: @@ -443,7 +455,7 @@ def get( resource. 
Args: - request (google.cloud.compute_v1.types.GetRegionBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.GetRegionBackendServiceRequest, dict]): The request object. A request message for RegionBackendServices.Get. See the method description for details. @@ -528,13 +540,13 @@ def get( def get_health( self, - request: compute.GetHealthRegionBackendServiceRequest = None, + request: Union[compute.GetHealthRegionBackendServiceRequest, dict] = None, *, project: str = None, region: str = None, backend_service: str = None, resource_group_reference_resource: compute.ResourceGroupReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendServiceGroupHealth: @@ -542,7 +554,7 @@ def get_health( regional BackendService. Args: - request (google.cloud.compute_v1.types.GetHealthRegionBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.GetHealthRegionBackendServiceRequest, dict]): The request object. A request message for RegionBackendServices.GetHealth. See the method description for details. @@ -623,12 +635,12 @@ def get_health( def insert( self, - request: compute.InsertRegionBackendServiceRequest = None, + request: Union[compute.InsertRegionBackendServiceRequest, dict] = None, *, project: str = None, region: str = None, backend_service_resource: compute.BackendService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -638,7 +650,7 @@ def insert( overview. Args: - request (google.cloud.compute_v1.types.InsertRegionBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionBackendServiceRequest, dict]): The request object. A request message for RegionBackendServices.Insert. See the method description for details. @@ -721,11 +733,11 @@ def insert( def list( self, - request: compute.ListRegionBackendServicesRequest = None, + request: Union[compute.ListRegionBackendServicesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -734,7 +746,7 @@ def list( given region. Args: - request (google.cloud.compute_v1.types.ListRegionBackendServicesRequest): + request (Union[google.cloud.compute_v1.types.ListRegionBackendServicesRequest, dict]): The request object. A request message for RegionBackendServices.List. See the method description for details. @@ -806,13 +818,13 @@ def list( def patch( self, - request: compute.PatchRegionBackendServiceRequest = None, + request: Union[compute.PatchRegionBackendServiceRequest, dict] = None, *, project: str = None, region: str = None, backend_service: str = None, backend_service_resource: compute.BackendService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -823,7 +835,7 @@ def patch( patch format and processing rules. Args: - request (google.cloud.compute_v1.types.PatchRegionBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.PatchRegionBackendServiceRequest, dict]): The request object. A request message for RegionBackendServices.Patch. 
See the method description for details. @@ -917,13 +929,13 @@ def patch( def update( self, - request: compute.UpdateRegionBackendServiceRequest = None, + request: Union[compute.UpdateRegionBackendServiceRequest, dict] = None, *, project: str = None, region: str = None, backend_service: str = None, backend_service_resource: compute.BackendService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -932,7 +944,7 @@ def update( information, see Backend services overview . Args: - request (google.cloud.compute_v1.types.UpdateRegionBackendServiceRequest): + request (Union[google.cloud.compute_v1.types.UpdateRegionBackendServiceRequest, dict]): The request object. A request message for RegionBackendServices.Update. See the method description for details. @@ -1024,6 +1036,19 @@ def update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_backend_services/pagers.py b/google/cloud/compute_v1/services/region_backend_services/pagers.py index c26346ce6..12e433c1d 100644 --- a/google/cloud/compute_v1/services/region_backend_services/pagers.py +++ b/google/cloud/compute_v1/services/region_backend_services/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.BackendServiceList]: + def pages(self) -> Iterator[compute.BackendServiceList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.BackendService]: + def __iter__(self) -> Iterator[compute.BackendService]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_backend_services/transports/base.py b/google/cloud/compute_v1/services/region_backend_services/transports/base.py index 654d4530e..6ac7c54e8 100644 --- a/google/cloud/compute_v1/services/region_backend_services/transports/base.py +++ b/google/cloud/compute_v1/services/region_backend_services/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth 
import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionBackendServicesTransport(abc.ABC): """Abstract transport class for RegionBackendServices.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -181,6 +145,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_backend_services/transports/rest.py b/google/cloud/compute_v1/services/region_backend_services/transports/rest.py index 453f2ed72..40500bd4a 100644 --- a/google/cloud/compute_v1/services/region_backend_services/transports/rest.py +++ b/google/cloud/compute_v1/services/region_backend_services/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionBackendServicesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionBackendServicesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionBackendServicesRestTransport(RegionBackendServicesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionBackendServices.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,25 +165,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - region=request.region, - backend_service=request.backend_service, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionBackendServiceRequest.to_json( + compute.DeleteRegionBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -163,10 +224,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendService: r"""Call the get method over HTTP. @@ -177,6 +240,9 @@ def get( RegionBackendServices.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -200,23 +266,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - region=request.region, - backend_service=request.backend_service, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionBackendServiceRequest.to_json( + compute.GetRegionBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -228,10 +327,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_health( + def _get_health( self, request: compute.GetHealthRegionBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendServiceGroupHealth: r"""Call the get health method over HTTP. @@ -242,6 +343,9 @@ def get_health( RegionBackendServices.GetHealth. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -250,31 +354,63 @@ def get_health( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/getHealth", + "body": "resource_group_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetHealthRegionBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ResourceGroupReference.to_json( - request.resource_group_reference_resource, + compute.ResourceGroupReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/getHealth".format( - host=self._host, - project=request.project, - region=request.region, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetHealthRegionBackendServiceRequest.to_json( + compute.GetHealthRegionBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -287,10 +423,12 @@ def get_health( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -301,6 +439,9 @@ def insert( RegionBackendServices.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -324,30 +465,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices", + "body": "backend_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendService.to_json( - request.backend_service_resource, + compute.BackendService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/backendServices".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionBackendServiceRequest.to_json( + compute.InsertRegionBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -358,10 +531,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionBackendServicesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.BackendServiceList: r"""Call the list method over HTTP. @@ -372,6 +547,9 @@ def list( RegionBackendServices.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -382,30 +560,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/backendServices".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionBackendServicesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionBackendServicesRequest.to_json( + compute.ListRegionBackendServicesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionBackendServicesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionBackendServicesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionBackendServicesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionBackendServicesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionBackendServicesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -417,10 +620,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchRegionBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -431,6 +636,9 @@ def patch( RegionBackendServices.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -454,33 +662,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}", + "body": "backend_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchRegionBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendService.to_json( - request.backend_service_resource, + compute.BackendService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - region=request.region, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRegionBackendServiceRequest.to_json( + compute.PatchRegionBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRegionBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -491,10 +729,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateRegionBackendServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -505,6 +745,9 @@ def update( RegionBackendServices.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -528,33 +771,63 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}", + "body": "backend_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("backend_service", "backendService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.UpdateRegionBackendServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BackendService.to_json( - request.backend_service_resource, + compute.BackendService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}".format( - host=self._host, - project=request.project, - region=request.region, - backend_service=request.backend_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateRegionBackendServiceRequest.to_json( + compute.UpdateRegionBackendServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateRegionBackendServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -565,5 +838,55 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionBackendServiceRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetRegionBackendServiceRequest], compute.BackendService]: + return self._get + + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionBackendServiceRequest], + compute.BackendServiceGroupHealth, + ]: + return self._get_health + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionBackendServiceRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionBackendServicesRequest], compute.BackendServiceList + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchRegionBackendServiceRequest], compute.Operation]: + return self._patch + + @property + def update( + self, + ) -> Callable[[compute.UpdateRegionBackendServiceRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("RegionBackendServicesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_commitments/client.py b/google/cloud/compute_v1/services/region_commitments/client.py index 582f000c4..f24726838 100644 --- a/google/cloud/compute_v1/services/region_commitments/client.py +++ b/google/cloud/compute_v1/services/region_commitments/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_commitments import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionCommitmentsTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,14 +339,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListRegionCommitmentsRequest = None, + request: Union[compute.AggregatedListRegionCommitmentsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -343,7 +355,7 @@ def aggregated_list( region. Args: - request (google.cloud.compute_v1.types.AggregatedListRegionCommitmentsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListRegionCommitmentsRequest, dict]): The request object. A request message for RegionCommitments.AggregatedList. See the method description for details. @@ -404,12 +416,12 @@ def aggregated_list( def get( self, - request: compute.GetRegionCommitmentRequest = None, + request: Union[compute.GetRegionCommitmentRequest, dict] = None, *, project: str = None, region: str = None, commitment: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Commitment: @@ -418,7 +430,7 @@ def get( request. 
Args: - request (google.cloud.compute_v1.types.GetRegionCommitmentRequest): + request (Union[google.cloud.compute_v1.types.GetRegionCommitmentRequest, dict]): The request object. A request message for RegionCommitments.Get. See the method description for details. @@ -493,12 +505,12 @@ def get( def insert( self, - request: compute.InsertRegionCommitmentRequest = None, + request: Union[compute.InsertRegionCommitmentRequest, dict] = None, *, project: str = None, region: str = None, commitment_resource: compute.Commitment = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -506,7 +518,7 @@ def insert( the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertRegionCommitmentRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionCommitmentRequest, dict]): The request object. A request message for RegionCommitments.Insert. See the method description for details. @@ -587,11 +599,11 @@ def insert( def list( self, - request: compute.ListRegionCommitmentsRequest = None, + request: Union[compute.ListRegionCommitmentsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -599,7 +611,7 @@ def list( specified region. Args: - request (google.cloud.compute_v1.types.ListRegionCommitmentsRequest): + request (Union[google.cloud.compute_v1.types.ListRegionCommitmentsRequest, dict]): The request object. A request message for RegionCommitments.List. See the method description for details. @@ -667,6 +679,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_commitments/pagers.py b/google/cloud/compute_v1/services/region_commitments/pagers.py index b33d259fe..670b7be28 100644 --- a/google/cloud/compute_v1/services/region_commitments/pagers.py +++ b/google/cloud/compute_v1/services/region_commitments/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.CommitmentAggregatedList]: + def pages(self) -> Iterator[compute.CommitmentAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.CommitmentsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.CommitmentsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.CommitmentList]: + def pages(self) -> Iterator[compute.CommitmentList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Commitment]: + def __iter__(self) -> Iterator[compute.Commitment]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_commitments/transports/base.py b/google/cloud/compute_v1/services/region_commitments/transports/base.py index 33cf045d1..c5c734a61 100644 --- a/google/cloud/compute_v1/services/region_commitments/transports/base.py +++ b/google/cloud/compute_v1/services/region_commitments/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except 
pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionCommitmentsTransport(abc.ABC): """Abstract transport class for RegionCommitments.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/region_commitments/transports/rest.py b/google/cloud/compute_v1/services/region_commitments/transports/rest.py index 80920dc9d..101fdb243 100644 --- a/google/cloud/compute_v1/services/region_commitments/transports/rest.py +++ b/google/cloud/compute_v1/services/region_commitments/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionCommitmentsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionCommitmentsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionCommitmentsRestTransport(RegionCommitmentsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListRegionCommitmentsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.CommitmentAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( RegionCommitments.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -120,35 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/commitments".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/commitments", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListRegionCommitmentsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListRegionCommitmentsRequest.to_json( + compute.AggregatedListRegionCommitmentsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListRegionCommitmentsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListRegionCommitmentsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListRegionCommitmentsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListRegionCommitmentsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListRegionCommitmentsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListRegionCommitmentsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetRegionCommitmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Commitment: r"""Call the get method over HTTP. @@ -174,6 +225,9 @@ def get( RegionCommitments.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -191,23 +245,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}".format( - host=self._host, - project=request.project, - region=request.region, - commitment=request.commitment, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("commitment", "commitment"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionCommitmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionCommitmentRequest.to_json( + compute.GetRegionCommitmentRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -219,10 +304,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionCommitmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -233,6 +320,9 @@ def insert( RegionCommitments.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -256,30 +346,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/commitments", + "body": "commitment_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionCommitmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Commitment.to_json( - request.commitment_resource, + compute.Commitment(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/commitments".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionCommitmentRequest.to_json( + compute.InsertRegionCommitmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionCommitmentRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -290,10 +412,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionCommitmentsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.CommitmentList: r"""Call the list method over HTTP. @@ -304,6 +428,9 @@ def list( RegionCommitments.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -314,30 +441,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/commitments".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/commitments", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionCommitmentsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionCommitmentsRequest.to_json( + compute.ListRegionCommitmentsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionCommitmentsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionCommitmentsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionCommitmentsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionCommitmentsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionCommitmentsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
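Illustrative aside, not part of the generated code: the "required fields" backfill loop that now appears in each handler exists because proto-plus to_json with including_default_value_fields=False drops any field still holding its proto3 default, so a required parameter left at an empty or zero value would vanish from the serialized request; the loop restores such values from the transcoded request before the HTTP call. A rough sketch of the dropping behavior, with hypothetical field values:

    import json

    from google.cloud.compute_v1.types import compute

    # "project" is deliberately left at the proto3 default (empty string).
    request = compute.ListRegionCommitmentsRequest(project="", region="us-central1")

    serialized = json.loads(
        compute.ListRegionCommitmentsRequest.to_json(
            request,
            including_default_value_fields=False,
            use_integers_for_enums=False,
        )
    )

    # "project" is absent from `serialized` even though the API requires it; the
    # required_fields loop copies any such value back into query_params from the
    # transcoded request so it is not silently lost.
    assert "project" not in serialized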
@@ -349,5 +501,33 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListRegionCommitmentsRequest], + compute.CommitmentAggregatedList, + ]: + return self._aggregated_list + + @property + def get(self) -> Callable[[compute.GetRegionCommitmentRequest], compute.Commitment]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionCommitmentRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListRegionCommitmentsRequest], compute.CommitmentList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("RegionCommitmentsRestTransport",) diff --git a/google/cloud/compute_v1/services/region_disk_types/client.py b/google/cloud/compute_v1/services/region_disk_types/client.py index 8519d3a9e..fb3e72bcb 100644 --- a/google/cloud/compute_v1/services/region_disk_types/client.py +++ b/google/cloud/compute_v1/services/region_disk_types/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_disk_types import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionDiskTypesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,16 +337,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def get( self, - request: compute.GetRegionDiskTypeRequest = None, + request: Union[compute.GetRegionDiskTypeRequest, dict] = None, *, project: str = None, region: str = None, disk_type: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskType: @@ -343,7 +355,7 @@ def get( of available disk types by making a list() request. Args: - request (google.cloud.compute_v1.types.GetRegionDiskTypeRequest): + request (Union[google.cloud.compute_v1.types.GetRegionDiskTypeRequest, dict]): The request object. A request message for RegionDiskTypes.Get. See the method description for details. @@ -422,11 +434,11 @@ def get( def list( self, - request: compute.ListRegionDiskTypesRequest = None, + request: Union[compute.ListRegionDiskTypesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -434,7 +446,7 @@ def list( the specified project. Args: - request (google.cloud.compute_v1.types.ListRegionDiskTypesRequest): + request (Union[google.cloud.compute_v1.types.ListRegionDiskTypesRequest, dict]): The request object. A request message for RegionDiskTypes.List. See the method description for details. @@ -502,6 +514,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_disk_types/pagers.py b/google/cloud/compute_v1/services/region_disk_types/pagers.py index 3a283a8ac..913daba02 100644 --- a/google/cloud/compute_v1/services/region_disk_types/pagers.py +++ b/google/cloud/compute_v1/services/region_disk_types/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RegionDiskTypeList]: + def pages(self) -> Iterator[compute.RegionDiskTypeList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.DiskType]: + def __iter__(self) -> Iterator[compute.DiskType]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_disk_types/transports/base.py b/google/cloud/compute_v1/services/region_disk_types/transports/base.py index c34ec1dd7..5545996ef 100644 --- a/google/cloud/compute_v1/services/region_disk_types/transports/base.py +++ b/google/cloud/compute_v1/services/region_disk_types/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionDiskTypesTransport(abc.ABC): """Abstract transport class for RegionDiskTypes.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. 
+ # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -167,6 +131,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def get( self, diff --git a/google/cloud/compute_v1/services/region_disk_types/transports/rest.py b/google/cloud/compute_v1/services/region_disk_types/transports/rest.py index 7bd3747c8..bf4543851 100644 --- a/google/cloud/compute_v1/services/region_disk_types/transports/rest.py +++ b/google/cloud/compute_v1/services/region_disk_types/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionDiskTypesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionDiskTypesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionDiskTypesRestTransport(RegionDiskTypesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def get( + def _get( self, request: compute.GetRegionDiskTypeRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskType: r"""Call the get method over HTTP. @@ -112,6 +139,9 @@ def get( RegionDiskTypes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
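The constructor above now accepts a url_scheme argument documented for testing or local servers. Below is a minimal sketch (not part of this diff) of building the REST transport directly; the explicit host and the anonymous credentials are placeholders chosen only to keep the sketch self-contained. Note that the request helpers in the following hunks still format URLs with a hard-coded "https://" (see their "Replace with proper schema configuration (http/https) logic" comments), so the new argument is accepted but not yet consulted when request URLs are built.

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.region_disk_types.transports.rest import (
    RegionDiskTypesRestTransport,
)

# Anonymous credentials keep the sketch runnable without application
# default credentials; a real caller would normally omit this argument.
transport = RegionDiskTypesRestTransport(
    host="compute.googleapis.com",
    credentials=ga_credentials.AnonymousCredentials(),
    url_scheme="https",
)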
@@ -131,23 +161,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/diskTypes/{disk_type}".format( - host=self._host, - project=request.project, - region=request.region, - disk_type=request.disk_type, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/diskTypes/{disk_type}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk_type", "diskType"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionDiskTypeRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionDiskTypeRequest.to_json( + compute.GetRegionDiskTypeRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +218,12 @@ def get( # Return the response return compute.DiskType.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionDiskTypesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionDiskTypeList: r"""Call the list method over HTTP. @@ -171,6 +234,9 @@ def list( RegionDiskTypes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
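For reference, a rough illustration (field values are made up) of what path_template.transcode returns for the http_options used in the hunk above: fields consumed by the URI template are substituted into "uri", the HTTP verb comes back as "method", and any request fields left over land in "query_params".

from google.api_core import path_template

http_options = [
    {
        "method": "get",
        "uri": "/compute/v1/projects/{project}/regions/{region}/diskTypes/{disk_type}",
    },
]
transcoded = path_template.transcode(
    http_options, project="my-project", region="us-central1", disk_type="pd-ssd"
)
assert transcoded["method"] == "get"
assert transcoded["uri"] == (
    "/compute/v1/projects/my-project/regions/us-central1/diskTypes/pd-ssd"
)
assert transcoded["query_params"] == {}  # nothing left over for this request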
@@ -179,30 +245,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/diskTypes".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/diskTypes", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionDiskTypesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionDiskTypesRequest.to_json( + compute.ListRegionDiskTypesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionDiskTypesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionDiskTypesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionDiskTypesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionDiskTypesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionDiskTypesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -214,5 +303,18 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def get(self) -> Callable[[compute.GetRegionDiskTypeRequest], compute.DiskType]: + return self._get + + @property + def list( + self, + ) -> Callable[[compute.ListRegionDiskTypesRequest], compute.RegionDiskTypeList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("RegionDiskTypesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_disks/client.py b/google/cloud/compute_v1/services/region_disks/client.py index 66955ffe8..3a70e73f5 100644 --- a/google/cloud/compute_v1/services/region_disks/client.py +++ b/google/cloud/compute_v1/services/region_disks/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_disks import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionDisksTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,17 +335,18 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_resource_policies( self, - request: compute.AddResourcePoliciesRegionDiskRequest = None, + request: Union[compute.AddResourcePoliciesRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, disk: str = None, region_disks_add_resource_policies_request_resource: compute.RegionDisksAddResourcePoliciesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -343,7 +355,7 @@ def add_resource_policies( this disk for scheduling snapshot creation. Args: - request (google.cloud.compute_v1.types.AddResourcePoliciesRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.AddResourcePoliciesRegionDiskRequest, dict]): The request object. A request message for RegionDisks.AddResourcePolicies. See the method description for details. 
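A short sketch of the stricter environment-variable handling introduced above. The removed distutils.util.strtobool parsing also accepted values such as "1", "yes" or "on"; after this change anything other than the literal strings "true" and "false" fails fast at client construction with the ValueError shown in the hunk.

import os

from google.cloud import compute_v1

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "yes"  # accepted before, rejected now
try:
    compute_v1.RegionDisksClient()
except ValueError as exc:
    print(exc)  # ...must be either `true` or `false`
finally:
    del os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"]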
@@ -437,20 +449,20 @@ def add_resource_policies( def create_snapshot( self, - request: compute.CreateSnapshotRegionDiskRequest = None, + request: Union[compute.CreateSnapshotRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, disk: str = None, snapshot_resource: compute.Snapshot = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Creates a snapshot of this regional disk. Args: - request (google.cloud.compute_v1.types.CreateSnapshotRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.CreateSnapshotRegionDiskRequest, dict]): The request object. A request message for RegionDisks.CreateSnapshot. See the method description for details. @@ -540,12 +552,12 @@ def create_snapshot( def delete( self, - request: compute.DeleteRegionDiskRequest = None, + request: Union[compute.DeleteRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, disk: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -556,7 +568,7 @@ def delete( from the disk. You must separately delete snapshots. Args: - request (google.cloud.compute_v1.types.DeleteRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionDiskRequest, dict]): The request object. A request message for RegionDisks.Delete. See the method description for details. @@ -639,19 +651,19 @@ def delete( def get( self, - request: compute.GetRegionDiskRequest = None, + request: Union[compute.GetRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, disk: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Disk: r"""Returns a specified regional persistent disk. Args: - request (google.cloud.compute_v1.types.GetRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.GetRegionDiskRequest, dict]): The request object. A request message for RegionDisks.Get. See the method description for details. project (str): @@ -730,12 +742,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyRegionDiskRequest = None, + request: Union[compute.GetIamPolicyRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -743,7 +755,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyRegionDiskRequest, dict]): The request object. A request message for RegionDisks.GetIamPolicy. See the method description for details. 
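Since the methods above now annotate request as Union[<RequestMessage>, dict], callers can pass a plain dict that the client coerces into the request message. A brief usage sketch, with placeholder project, region and disk names:

from google.cloud import compute_v1

client = compute_v1.RegionDisksClient()
disk = client.get(
    request={"project": "my-project", "region": "us-central1", "disk": "my-disk"}
)
print(disk.name, disk.size_gb)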
@@ -852,12 +864,12 @@ def get_iam_policy( def insert( self, - request: compute.InsertRegionDiskRequest = None, + request: Union[compute.InsertRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, disk_resource: compute.Disk = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -865,7 +877,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionDiskRequest, dict]): The request object. A request message for RegionDisks.Insert. See the method description for details. @@ -946,11 +958,11 @@ def insert( def list( self, - request: compute.ListRegionDisksRequest = None, + request: Union[compute.ListRegionDisksRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -958,7 +970,7 @@ def list( within the specified region. Args: - request (google.cloud.compute_v1.types.ListRegionDisksRequest): + request (Union[google.cloud.compute_v1.types.ListRegionDisksRequest, dict]): The request object. A request message for RegionDisks.List. See the method description for details. @@ -1027,20 +1039,20 @@ def list( def remove_resource_policies( self, - request: compute.RemoveResourcePoliciesRegionDiskRequest = None, + request: Union[compute.RemoveResourcePoliciesRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, disk: str = None, region_disks_remove_resource_policies_request_resource: compute.RegionDisksRemoveResourcePoliciesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Removes resource policies from a regional disk. Args: - request (google.cloud.compute_v1.types.RemoveResourcePoliciesRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.RemoveResourcePoliciesRegionDiskRequest, dict]): The request object. A request message for RegionDisks.RemoveResourcePolicies. See the method description for details. @@ -1139,20 +1151,20 @@ def remove_resource_policies( def resize( self, - request: compute.ResizeRegionDiskRequest = None, + request: Union[compute.ResizeRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, disk: str = None, region_disks_resize_request_resource: compute.RegionDisksResizeRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Resizes the specified regional persistent disk. Args: - request (google.cloud.compute_v1.types.ResizeRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.ResizeRegionDiskRequest, dict]): The request object. A request message for RegionDisks.Resize. See the method description for details. 
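The list method above still returns a pagers.ListPager; the pagers.py hunks in this change only retype pages and __iter__ as Iterator rather than Iterable, so iteration itself is unchanged. A small sketch with placeholder names:

from google.cloud import compute_v1

client = compute_v1.RegionDisksClient()

# Item by item; additional pages are fetched lazily as needed.
for disk in client.list(project="my-project", region="us-central1"):
    print(disk.name)

# Or page by page, e.g. to inspect page-level contents.
for page in client.list(project="my-project", region="us-central1").pages:
    print(len(page.items), "disks on this page")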
@@ -1244,13 +1256,13 @@ def resize( def set_iam_policy( self, - request: compute.SetIamPolicyRegionDiskRequest = None, + request: Union[compute.SetIamPolicyRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_policy_request_resource: compute.RegionSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1258,7 +1270,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyRegionDiskRequest, dict]): The request object. A request message for RegionDisks.SetIamPolicy. See the method description for details. @@ -1378,20 +1390,20 @@ def set_iam_policy( def set_labels( self, - request: compute.SetLabelsRegionDiskRequest = None, + request: Union[compute.SetLabelsRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_labels_request_resource: compute.RegionSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Sets the labels on the target regional disk. Args: - request (google.cloud.compute_v1.types.SetLabelsRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsRegionDiskRequest, dict]): The request object. A request message for RegionDisks.SetLabels. See the method description for details. @@ -1485,13 +1497,13 @@ def set_labels( def test_iam_permissions( self, - request: compute.TestIamPermissionsRegionDiskRequest = None, + request: Union[compute.TestIamPermissionsRegionDiskRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1499,7 +1511,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsRegionDiskRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsRegionDiskRequest, dict]): The request object. A request message for RegionDisks.TestIamPermissions. See the method description for details. @@ -1578,6 +1590,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_disks/pagers.py b/google/cloud/compute_v1/services/region_disks/pagers.py index 1b0fa465a..337976db9 100644 --- a/google/cloud/compute_v1/services/region_disks/pagers.py +++ b/google/cloud/compute_v1/services/region_disks/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.DiskList]: + def pages(self) -> Iterator[compute.DiskList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Disk]: + def __iter__(self) -> Iterator[compute.Disk]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_disks/transports/base.py b/google/cloud/compute_v1/services/region_disks/transports/base.py index a9776d689..99f9482c5 100644 --- a/google/cloud/compute_v1/services/region_disks/transports/base.py +++ b/google/cloud/compute_v1/services/region_disks/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionDisksTransport(abc.ABC): """Abstract transport class for RegionDisks.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -202,6 +166,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_resource_policies( self, diff --git a/google/cloud/compute_v1/services/region_disks/transports/rest.py b/google/cloud/compute_v1/services/region_disks/transports/rest.py index 16a39f097..d2140c5aa 100644 --- a/google/cloud/compute_v1/services/region_disks/transports/rest.py +++ b/google/cloud/compute_v1/services/region_disks/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import RegionDisksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import RegionDisksTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionDisksRestTransport(RegionDisksTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_resource_policies( + def _add_resource_policies( self, request: compute.AddResourcePoliciesRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add resource policies method over HTTP. @@ -112,6 +136,9 @@ def add_resource_policies( RegionDisks.AddResourcePolicies. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
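Taken together, the __enter__/__exit__ methods added to the clients earlier in this change, the abstract close() added to the base transports, and the REST transports' close() (which closes the underlying session) make the clients usable as context managers. A minimal usage sketch with placeholder resource names; the warning from the docstrings still applies, so avoid this pattern with a shared transport.

from google.cloud import compute_v1

with compute_v1.RegionDisksClient() as client:
    disk = client.get(project="my-project", region="us-central1", disk="my-disk")
    print(disk.status)
# Leaving the block calls transport.close(), which shuts down the HTTP session.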
@@ -135,33 +162,63 @@ def add_resource_policies( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}/addResourcePolicies", + "body": "region_disks_add_resource_policies_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.AddResourcePoliciesRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionDisksAddResourcePoliciesRequest.to_json( - request.region_disks_add_resource_policies_request_resource, + compute.RegionDisksAddResourcePoliciesRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{disk}/addResourcePolicies".format( - host=self._host, - project=request.project, - region=request.region, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddResourcePoliciesRegionDiskRequest.to_json( + compute.AddResourcePoliciesRegionDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddResourcePoliciesRegionDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -172,10 +229,12 @@ def add_resource_policies( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def create_snapshot( + def _create_snapshot( self, request: compute.CreateSnapshotRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the create snapshot method over HTTP. @@ -186,6 +245,9 @@ def create_snapshot( RegionDisks.CreateSnapshot. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -209,33 +271,63 @@ def create_snapshot( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}/createSnapshot", + "body": "snapshot_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.CreateSnapshotRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Snapshot.to_json( - request.snapshot_resource, + compute.Snapshot(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{disk}/createSnapshot".format( - host=self._host, - project=request.project, - region=request.region, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.CreateSnapshotRegionDiskRequest.to_json( + compute.CreateSnapshotRegionDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.CreateSnapshotRegionDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -246,10 +338,12 @@ def create_snapshot( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -260,6 +354,9 @@ def delete( RegionDisks.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
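For the POST methods above, the "body" entry in http_options names the request field that becomes the HTTP payload: transcode() pops that field out of the request dict, and the handler rebuilds it into its proto-plus message before serializing it to JSON. A rough sketch with made-up values:

from google.api_core import path_template
from google.cloud.compute_v1.types import compute

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}/createSnapshot",
        "body": "snapshot_resource",
    },
]
request = compute.CreateSnapshotRegionDiskRequest(
    project="my-project",
    region="us-central1",
    disk="my-disk",
    snapshot_resource=compute.Snapshot(name="my-snapshot"),
)
transcoded = path_template.transcode(
    http_options, **compute.CreateSnapshotRegionDiskRequest.to_dict(request)
)
body = compute.Snapshot.to_json(
    compute.Snapshot(transcoded["body"]),
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
print(transcoded["method"], transcoded["uri"])
print(body)  # roughly: {"name": "my-snapshot"}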
@@ -283,25 +380,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{disk}".format( - host=self._host, - project=request.project, - region=request.region, - disk=request.disk, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionDiskRequest.to_json( + compute.DeleteRegionDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -311,10 +437,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Disk: r"""Call the get method over HTTP. @@ -325,6 +453,9 @@ def get( RegionDisks.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
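The required-fields loop above compensates for serializing with including_default_value_fields=False: a required field whose value equals the proto3 default (for example an empty string) is dropped from the JSON and would otherwise vanish from query_params, so it is copied back from the transcoded request under its camelCase name. A small illustration of the underlying behaviour, with assumed values:

import json

from google.cloud.compute_v1.types import compute

request = compute.GetRegionDiskRequest(project="my-project", region="", disk="my-disk")
serialized = json.loads(
    compute.GetRegionDiskRequest.to_json(request, including_default_value_fields=False)
)
assert "region" not in serialized  # the default (empty) value was dropped
assert serialized["project"] == "my-project"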
@@ -345,23 +476,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{disk}".format( - host=self._host, - project=request.project, - region=request.region, - disk=request.disk, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionDiskRequest.to_json( + compute.GetRegionDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -371,10 +533,12 @@ def get( # Return the response return compute.Disk.from_json(response.content, ignore_unknown_fields=True) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -385,6 +549,9 @@ def get_iam_policy( RegionDisks.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
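rest_helpers.flatten_query_params, used above when the request is sent, converts the query-parameter dict into the flat sequence of key/value pairs that the requests session expects; nested message fields come out as dotted keys and repeated fields repeat the key. A tiny illustration with assumed values:

from google.api_core import rest_helpers

params = rest_helpers.flatten_query_params(
    {"maxResults": 50, "pageToken": "abc123", "returnPartialSuccess": True}
)
print(params)  # roughly: [('maxResults', 50), ('pageToken', 'abc123'), ('returnPartialSuccess', True)]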
@@ -433,30 +600,56 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyRegionDiskRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyRegionDiskRequest.to_json( + compute.GetIamPolicyRegionDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -466,10 +659,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -480,6 +675,9 @@ def insert( RegionDisks.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -503,32 +701,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks", + "body": "disk_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Disk.to_json( - request.disk_resource, + compute.Disk(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionDiskRequest.to_json( + compute.InsertRegionDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionDiskRequest.request_id in request: - query_params["requestId"] = request.request_id - if compute.InsertRegionDiskRequest.source_image in request: - query_params["sourceImage"] = request.source_image + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -539,10 +765,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionDisksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DiskList: r"""Call the list method over HTTP. @@ -553,6 +781,9 @@ def list( RegionDisks.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -561,30 +792,53 @@ def list( A list of Disk resources. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks".format( - host=self._host, project=request.project, region=request.region, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionDisksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionDisksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionDisksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionDisksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionDisksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionDisksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionDisksRequest.to_json( + compute.ListRegionDisksRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -594,10 +848,12 @@ def list( # Return the response return compute.DiskList.from_json(response.content, ignore_unknown_fields=True) - def remove_resource_policies( + def _remove_resource_policies( self, request: compute.RemoveResourcePoliciesRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove resource policies method over HTTP. @@ -608,6 +864,9 @@ def remove_resource_policies( RegionDisks.RemoveResourcePolicies. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -631,33 +890,67 @@ def remove_resource_policies( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}/removeResourcePolicies", + "body": "region_disks_remove_resource_policies_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.RemoveResourcePoliciesRegionDiskRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionDisksRemoveResourcePoliciesRequest.to_json( - request.region_disks_remove_resource_policies_request_resource, + compute.RegionDisksRemoveResourcePoliciesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{disk}/removeResourcePolicies".format( - host=self._host, - project=request.project, - region=request.region, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveResourcePoliciesRegionDiskRequest.to_json( + compute.RemoveResourcePoliciesRegionDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveResourcePoliciesRegionDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -668,10 +961,12 @@ def remove_resource_policies( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def resize( + def _resize( self, request: compute.ResizeRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the resize method over HTTP. @@ -682,6 +977,9 @@ def resize( RegionDisks.Resize. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -705,33 +1003,61 @@ def resize( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}/resize", + "body": "region_disks_resize_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("disk", "disk"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ResizeRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionDisksResizeRequest.to_json( - request.region_disks_resize_request_resource, + compute.RegionDisksResizeRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{disk}/resize".format( - host=self._host, - project=request.project, - region=request.region, - disk=request.disk, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ResizeRegionDiskRequest.to_json( + compute.ResizeRegionDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ResizeRegionDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -742,10 +1068,12 @@ def resize( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -756,6 +1084,9 @@ def set_iam_policy( RegionDisks.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
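A hedged sketch (illustrative URL and payload) of the dynamic verb dispatch used above: instead of hard-coding self._session.post(...), the HTTP verb comes from the transcoded http option, so one code path serves GET, POST, PATCH and DELETE. The network call itself is left commented out.

import requests
from google.api_core import rest_helpers

session = requests.Session()
method = "post"                                   # e.g. transcoded_request["method"]
uri = "/compute/v1/projects/my-project/regions/us-central1/disks/my-disk/resize"
query_params = {"requestId": "1234"}

send = getattr(session, method)                   # resolves to session.post
print(rest_helpers.flatten_query_params(query_params))  # e.g. [('requestId', '1234')]
# send("https://compute.googleapis.com" + uri,
#      timeout=30.0,
#      headers={"Content-Type": "application/json"},
#      params=rest_helpers.flatten_query_params(query_params),
#      data='{"sizeGb": "200"}')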
@@ -804,31 +1135,63 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetPolicyRequest.to_json( - request.region_set_policy_request_resource, + compute.RegionSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyRegionDiskRequest.to_json( + compute.SetIamPolicyRegionDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -839,10 +1202,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def set_labels( + def _set_labels( self, request: compute.SetLabelsRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -853,6 +1218,9 @@ def set_labels( RegionDisks.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -876,33 +1244,61 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setLabels", + "body": "region_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetLabelsRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetLabelsRequest.to_json( - request.region_set_labels_request_resource, + compute.RegionSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setLabels".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsRegionDiskRequest.to_json( + compute.SetLabelsRegionDiskRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetLabelsRegionDiskRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -913,10 +1309,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsRegionDiskRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -927,6 +1325,9 @@ def test_iam_permissions( RegionDisks.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
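A hedged sketch (illustrative field values) of the body-serialization pattern above: transcode() hands the body sub-message back as a plain dict, which is wrapped in its proto-plus message class and serialized with to_json to produce the camelCase JSON payload sent as the request data.

import json
from google.cloud.compute_v1.types import compute

body_dict = {"label_fingerprint": "42WmSpB8rSM=", "labels": {"env": "dev"}}
body = compute.RegionSetLabelsRequest.to_json(
    compute.RegionSetLabelsRequest(body_dict),
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
print(json.loads(body))  # e.g. {'labelFingerprint': '42WmSpB8rSM=', 'labels': {'env': 'dev'}}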
@@ -935,31 +1336,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsRegionDiskRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/disks/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsRegionDiskRequest.to_json( + compute.TestIamPermissionsRegionDiskRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -972,5 +1405,72 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def add_resource_policies( + self, + ) -> Callable[[compute.AddResourcePoliciesRegionDiskRequest], compute.Operation]: + return self._add_resource_policies + + @property + def create_snapshot( + self, + ) -> Callable[[compute.CreateSnapshotRegionDiskRequest], compute.Operation]: + return self._create_snapshot + + @property + def delete(self) -> Callable[[compute.DeleteRegionDiskRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetRegionDiskRequest], compute.Disk]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyRegionDiskRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert(self) -> Callable[[compute.InsertRegionDiskRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListRegionDisksRequest], compute.DiskList]: + return self._list + + @property + def remove_resource_policies( + self, + ) -> Callable[[compute.RemoveResourcePoliciesRegionDiskRequest], compute.Operation]: + return self._remove_resource_policies + + @property + def resize(self) -> Callable[[compute.ResizeRegionDiskRequest], compute.Operation]: + return self._resize + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyRegionDiskRequest], compute.Policy]: + return self._set_iam_policy + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsRegionDiskRequest], compute.Operation]: + return self._set_labels + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionDiskRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("RegionDisksRestTransport",) diff --git a/google/cloud/compute_v1/services/region_health_check_services/client.py b/google/cloud/compute_v1/services/region_health_check_services/client.py index 2321ff38c..7c486ff2b 100644 --- a/google/cloud/compute_v1/services/region_health_check_services/client.py +++ b/google/cloud/compute_v1/services/region_health_check_services/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
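A stand-in sketch (class and values are illustrative, not the real transport) of the property-delegation pattern added above: each RPC body moves to a private _method, and a read-only property exposes it as a callable, so existing callers keep invoking transport.list(request) unchanged while the transport gains retry/timeout plumbing internally.

from typing import Callable

class _DemoTransport:                              # stand-in, not the generated class
    def _list(self, request: dict) -> str:
        return "disks in {project}/{region}".format(**request)

    @property
    def list(self) -> Callable[[dict], str]:
        return self._list

transport = _DemoTransport()
print(transport.list({"project": "my-project", "region": "us-central1"}))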
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_health_check_services import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionHealthCheckServicesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,23 +339,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionHealthCheckServiceRequest = None, + request: Union[compute.DeleteRegionHealthCheckServiceRequest, dict] = None, *, project: str = None, region: str = None, health_check_service: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified regional HealthCheckService. Args: - request (google.cloud.compute_v1.types.DeleteRegionHealthCheckServiceRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionHealthCheckServiceRequest, dict]): The request object. A request message for RegionHealthCheckServices.Delete. See the method description for details. @@ -430,12 +442,12 @@ def delete( def get( self, - request: compute.GetRegionHealthCheckServiceRequest = None, + request: Union[compute.GetRegionHealthCheckServiceRequest, dict] = None, *, project: str = None, region: str = None, health_check_service: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheckService: @@ -443,7 +455,7 @@ def get( resource. 
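A small sketch (helper name is made up) of the stricter environment-variable check that replaces distutils.util.strtobool above. strtobool accepted values such as "1", "yes" or "t"; the replacement only accepts the documented "true" and "false" and fails loudly on anything else, while also dropping the dependency on the deprecated distutils module.

import os

def _read_use_client_cert() -> bool:
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return value == "true"

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
print(_read_use_client_cert())   # True
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "1"
# _read_use_client_cert()        # raises ValueError; strtobool used to accept "1"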
Args: - request (google.cloud.compute_v1.types.GetRegionHealthCheckServiceRequest): + request (Union[google.cloud.compute_v1.types.GetRegionHealthCheckServiceRequest, dict]): The request object. A request message for RegionHealthCheckServices.Get. See the method description for details. @@ -516,12 +528,12 @@ def get( def insert( self, - request: compute.InsertRegionHealthCheckServiceRequest = None, + request: Union[compute.InsertRegionHealthCheckServiceRequest, dict] = None, *, project: str = None, region: str = None, health_check_service_resource: compute.HealthCheckService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -530,7 +542,7 @@ def insert( the request. Args: - request (google.cloud.compute_v1.types.InsertRegionHealthCheckServiceRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionHealthCheckServiceRequest, dict]): The request object. A request message for RegionHealthCheckServices.Insert. See the method description for details. @@ -613,11 +625,11 @@ def insert( def list( self, - request: compute.ListRegionHealthCheckServicesRequest = None, + request: Union[compute.ListRegionHealthCheckServicesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -626,7 +638,7 @@ def list( region. Args: - request (google.cloud.compute_v1.types.ListRegionHealthCheckServicesRequest): + request (Union[google.cloud.compute_v1.types.ListRegionHealthCheckServicesRequest, dict]): The request object. A request message for RegionHealthCheckServices.List. See the method description for details. @@ -696,13 +708,13 @@ def list( def patch( self, - request: compute.PatchRegionHealthCheckServiceRequest = None, + request: Union[compute.PatchRegionHealthCheckServiceRequest, dict] = None, *, project: str = None, region: str = None, health_check_service: str = None, health_check_service_resource: compute.HealthCheckService = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -712,7 +724,7 @@ def patch( patch format and processing rules. Args: - request (google.cloud.compute_v1.types.PatchRegionHealthCheckServiceRequest): + request (Union[google.cloud.compute_v1.types.PatchRegionHealthCheckServiceRequest, dict]): The request object. A request message for RegionHealthCheckServices.Patch. See the method description for details. @@ -805,6 +817,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_health_check_services/pagers.py b/google/cloud/compute_v1/services/region_health_check_services/pagers.py index b01067872..e20dd412c 100644 --- a/google/cloud/compute_v1/services/region_health_check_services/pagers.py +++ b/google/cloud/compute_v1/services/region_health_check_services/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.HealthCheckServicesList]: + def pages(self) -> Iterator[compute.HealthCheckServicesList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.HealthCheckService]: + def __iter__(self) -> Iterator[compute.HealthCheckService]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_health_check_services/transports/base.py b/google/cloud/compute_v1/services/region_health_check_services/transports/base.py index 1217f3ece..b6f3d0bfa 100644 --- a/google/cloud/compute_v1/services/region_health_check_services/transports/base.py +++ b/google/cloud/compute_v1/services/region_health_check_services/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionHealthCheckServicesTransport(abc.ABC): """Abstract transport class for RegionHealthCheckServices.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. 
self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py b/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py index aed7ac66f..0cd381404 100644 --- a/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py +++ b/google/cloud/compute_v1/services/region_health_check_services/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
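A stand-in sketch (classes are illustrative) of the close() contract introduced above: the abstract base transport declares close() and raises NotImplementedError, and each concrete transport releases its own resources, which for the REST transport means closing the underlying requests session.

import abc
import requests

class _BaseTransport(abc.ABC):           # stand-in for the generated base transport
    def close(self):
        """Closes resources associated with the transport."""
        raise NotImplementedError()

class _RestTransport(_BaseTransport):    # stand-in for the generated REST transport
    def __init__(self):
        self._session = requests.Session()

    def close(self):
        self._session.close()

transport = _RestTransport()
transport.close()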
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionHealthCheckServicesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionHealthCheckServicesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionHealthCheckServicesRestTransport(RegionHealthCheckServicesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionHealthCheckServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionHealthCheckServices.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
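A hedged sketch of how the REST transport now layers its client info: the base transport only records the gapic version, and the rest.py module adds the requests version on top, leaving grpc_version as None for a REST-only transport. Variable names here are illustrative.

import pkg_resources
from google.api_core import gapic_v1
from requests import __version__ as requests_version

try:
    _gapic_version = pkg_resources.get_distribution("google-cloud-compute").version
except pkg_resources.DistributionNotFound:  # pragma: NO COVER
    _gapic_version = None

DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=_gapic_version,
    grpc_version=None,            # REST-only transport
    rest_version=requests_version,
)
print(DEFAULT_CLIENT_INFO.to_user_agent())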
@@ -135,25 +165,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}".format( - host=self._host, - project=request.project, - region=request.region, - health_check_service=request.health_check_service, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check_service", "healthCheckService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionHealthCheckServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionHealthCheckServiceRequest.to_json( + compute.DeleteRegionHealthCheckServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionHealthCheckServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +224,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionHealthCheckServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheckService: r"""Call the get method over HTTP. @@ -177,6 +240,9 @@ def get( RegionHealthCheckServices.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -187,23 +253,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}".format( - host=self._host, - project=request.project, - region=request.region, - health_check_service=request.health_check_service, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check_service", "healthCheckService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionHealthCheckServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionHealthCheckServiceRequest.to_json( + compute.GetRegionHealthCheckServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -215,10 +314,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionHealthCheckServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -229,6 +330,9 @@ def insert( RegionHealthCheckServices.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -252,30 +356,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthCheckServices", + "body": "health_check_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionHealthCheckServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheckService.to_json( - request.health_check_service_resource, + compute.HealthCheckService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthCheckServices".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionHealthCheckServiceRequest.to_json( + compute.InsertRegionHealthCheckServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionHealthCheckServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -286,10 +422,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionHealthCheckServicesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheckServicesList: r"""Call the list method over HTTP. @@ -300,6 +438,9 @@ def list( RegionHealthCheckServices.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -308,33 +449,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthCheckServices".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthCheckServices", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionHealthCheckServicesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionHealthCheckServicesRequest.to_json( + compute.ListRegionHealthCheckServicesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionHealthCheckServicesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionHealthCheckServicesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionHealthCheckServicesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionHealthCheckServicesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListRegionHealthCheckServicesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -346,10 +509,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchRegionHealthCheckServiceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -360,6 +525,9 @@ def patch( RegionHealthCheckServices.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
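A small sketch (illustrative values) of the query-parameter jsonification used above: the snake_case dict returned by transcode() is round-tripped through the request message so the field names come out in the camelCase form the REST API expects, replacing the old per-field if-statements.

import json
from google.cloud.compute_v1.types import compute

transcoded_query_params = {"max_results": 50, "page_token": "abc"}
query_params = json.loads(
    compute.ListRegionHealthCheckServicesRequest.to_json(
        compute.ListRegionHealthCheckServicesRequest(transcoded_query_params),
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
)
print(query_params)  # e.g. {'maxResults': 50, 'pageToken': 'abc'}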
@@ -383,33 +551,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}", + "body": "health_check_service_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check_service", "healthCheckService"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchRegionHealthCheckServiceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheckService.to_json( - request.health_check_service_resource, + compute.HealthCheckService(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}".format( - host=self._host, - project=request.project, - region=request.region, - health_check_service=request.health_check_service, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRegionHealthCheckServiceRequest.to_json( + compute.PatchRegionHealthCheckServiceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRegionHealthCheckServiceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -420,5 +618,42 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionHealthCheckServiceRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionHealthCheckServiceRequest], compute.HealthCheckService + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionHealthCheckServiceRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionHealthCheckServicesRequest], compute.HealthCheckServicesList + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchRegionHealthCheckServiceRequest], compute.Operation]: + return self._patch + + def close(self): + self._session.close() + __all__ = ("RegionHealthCheckServicesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_health_checks/client.py b/google/cloud/compute_v1/services/region_health_checks/client.py index f78100c38..66c7c3edb 100644 --- a/google/cloud/compute_v1/services/region_health_checks/client.py +++ b/google/cloud/compute_v1/services/region_health_checks/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_health_checks import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionHealthChecksTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,23 +339,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionHealthCheckRequest = None, + request: Union[compute.DeleteRegionHealthCheckRequest, dict] = None, *, project: str = None, region: str = None, health_check: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified HealthCheck resource. Args: - request (google.cloud.compute_v1.types.DeleteRegionHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionHealthCheckRequest, dict]): The request object. A request message for RegionHealthChecks.Delete. See the method description for details. @@ -429,12 +441,12 @@ def delete( def get( self, - request: compute.GetRegionHealthCheckRequest = None, + request: Union[compute.GetRegionHealthCheckRequest, dict] = None, *, project: str = None, region: str = None, health_check: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheck: @@ -443,7 +455,7 @@ def get( request. 
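A hedged usage sketch for the relaxed request type introduced above (Union[..., dict]): callers may pass either the request message or a plain dict with the same fields, which the client coerces into the message. It assumes Application Default Credentials and real resource names; the calls are shown commented out.

from google.cloud import compute_v1

client = compute_v1.RegionHealthChecksClient()   # requires Application Default Credentials

# These two calls are intended to be equivalent:
# client.get(request=compute_v1.GetRegionHealthCheckRequest(
#     project="my-project", region="us-central1", health_check="my-check"))
# client.get(request={"project": "my-project", "region": "us-central1",
#                     "health_check": "my-check"})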
Args: - request (google.cloud.compute_v1.types.GetRegionHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.GetRegionHealthCheckRequest, dict]): The request object. A request message for RegionHealthChecks.Get. See the method description for details. @@ -532,12 +544,12 @@ def get( def insert( self, - request: compute.InsertRegionHealthCheckRequest = None, + request: Union[compute.InsertRegionHealthCheckRequest, dict] = None, *, project: str = None, region: str = None, health_check_resource: compute.HealthCheck = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -545,7 +557,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertRegionHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionHealthCheckRequest, dict]): The request object. A request message for RegionHealthChecks.Insert. See the method description for details. @@ -628,11 +640,11 @@ def insert( def list( self, - request: compute.ListRegionHealthChecksRequest = None, + request: Union[compute.ListRegionHealthChecksRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -640,7 +652,7 @@ def list( to the specified project. Args: - request (google.cloud.compute_v1.types.ListRegionHealthChecksRequest): + request (Union[google.cloud.compute_v1.types.ListRegionHealthChecksRequest, dict]): The request object. A request message for RegionHealthChecks.List. See the method description for details. @@ -712,13 +724,13 @@ def list( def patch( self, - request: compute.PatchRegionHealthCheckRequest = None, + request: Union[compute.PatchRegionHealthCheckRequest, dict] = None, *, project: str = None, region: str = None, health_check: str = None, health_check_resource: compute.HealthCheck = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -728,7 +740,7 @@ def patch( patch format and processing rules. Args: - request (google.cloud.compute_v1.types.PatchRegionHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.PatchRegionHealthCheckRequest, dict]): The request object. A request message for RegionHealthChecks.Patch. See the method description for details. @@ -822,13 +834,13 @@ def patch( def update( self, - request: compute.UpdateRegionHealthCheckRequest = None, + request: Union[compute.UpdateRegionHealthCheckRequest, dict] = None, *, project: str = None, region: str = None, health_check: str = None, health_check_resource: compute.HealthCheck = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -836,7 +848,7 @@ def update( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.UpdateRegionHealthCheckRequest): + request (Union[google.cloud.compute_v1.types.UpdateRegionHealthCheckRequest, dict]): The request object. A request message for RegionHealthChecks.Update. See the method description for details. @@ -928,6 +940,19 @@ def update( # Done; return the response. 
return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_health_checks/pagers.py b/google/cloud/compute_v1/services/region_health_checks/pagers.py index a11b8fc26..3d2773aa3 100644 --- a/google/cloud/compute_v1/services/region_health_checks/pagers.py +++ b/google/cloud/compute_v1/services/region_health_checks/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.HealthCheckList]: + def pages(self) -> Iterator[compute.HealthCheckList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.HealthCheck]: + def __iter__(self) -> Iterator[compute.HealthCheck]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_health_checks/transports/base.py b/google/cloud/compute_v1/services/region_health_checks/transports/base.py index 08473df47..e5a4959f1 100644 --- a/google/cloud/compute_v1/services/region_health_checks/transports/base.py +++ b/google/cloud/compute_v1/services/region_health_checks/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionHealthChecksTransport(abc.ABC): """Abstract transport class for RegionHealthChecks.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + 
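A stand-in sketch (classes are illustrative, not the generated pagers) of why the pager annotations above change from Iterable to Iterator: the pages property is a generator that lazily fetches the next page whenever the previous response carries a next_page_token, so Iterator is the more precise type.

from typing import Iterator, List

class _Page:                                   # stand-in for a *.List response message
    def __init__(self, items: List[str], next_page_token: str = ""):
        self.items = items
        self.next_page_token = next_page_token

class _Pager:                                  # stand-in for pagers.ListPager
    def __init__(self, method, first_response: _Page):
        self._method = method
        self._response = first_response

    @property
    def pages(self) -> Iterator[_Page]:
        yield self._response
        while self._response.next_page_token:
            self._response = self._method(self._response.next_page_token)
            yield self._response

    def __iter__(self) -> Iterator[str]:
        for page in self.pages:
            yield from page.items

pager = _Pager(lambda token: _Page(["check-3"]), _Page(["check-1", "check-2"], "token"))
print(list(pager))  # ['check-1', 'check-2', 'check-3']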
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -178,6 +142,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_health_checks/transports/rest.py b/google/cloud/compute_v1/services/region_health_checks/transports/rest.py index 5a3fce8f6..6077ae681 100644 --- a/google/cloud/compute_v1/services/region_health_checks/transports/rest.py +++ b/google/cloud/compute_v1/services/region_health_checks/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
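As context for the simplified scopes handling at the start of this hunk (possible now that a recent google-auth can be assumed), here is a minimal hedged sketch of how scopes and default_scopes reach google.auth.default; the cloud-platform scope below is only an illustrative default, not necessarily the exact AUTH_SCOPES of this transport.

    import google.auth

    # Both the caller-supplied scopes (possibly None) and the library defaults are
    # forwarded; google.auth.default prefers the explicit scopes when given and
    # falls back to default_scopes otherwise (supported in google-auth >= 1.25.0).
    credentials, project_id = google.auth.default(
        scopes=None,
        default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
    )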
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionHealthChecksTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionHealthChecksTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionHealthChecksRestTransport(RegionHealthChecksTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionHealthChecks.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,25 +165,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}".format( - host=self._host, - project=request.project, - region=request.region, - health_check=request.health_check, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionHealthCheckRequest.to_json( + compute.DeleteRegionHealthCheckRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +224,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheck: r"""Call the get method over HTTP. @@ -177,6 +240,9 @@ def get( RegionHealthChecks.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
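To make the transcoding step in the rewritten delete call above easier to follow, here is a small hedged sketch of google.api_core.path_template.transcode with placeholder field values; the returned dict's uri, method, and query_params keys are what the new request-building code relies on.

    from google.api_core import path_template

    http_options = [
        {
            "method": "delete",
            "uri": "/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}",
        },
    ]

    # Placeholder request fields: path fields are substituted into the URI template
    # and removed, and whatever is left over ends up under "query_params".
    transcoded = path_template.transcode(
        http_options,
        project="example-project",
        region="us-central1",
        health_check="example-health-check",
        request_id="12345",
    )
    print(transcoded["method"])        # delete
    print(transcoded["uri"])           # /compute/v1/projects/example-project/...
    print(transcoded["query_params"])  # {'request_id': '12345'}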
@@ -205,23 +271,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}".format( - host=self._host, - project=request.project, - region=request.region, - health_check=request.health_check, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionHealthCheckRequest.to_json( + compute.GetRegionHealthCheckRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -233,10 +330,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -247,6 +346,9 @@ def insert( RegionHealthChecks.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
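The required-fields loop added above exists because to_json drops fields that still hold their proto default value, yet the Compute API expects required fields to be present on the request URL. A hedged, self-contained sketch of the merge step with purely hypothetical field names and values:

    # Hypothetical inputs: to_json dropped a required field that was set to its
    # default, but the transcoded query params still carry it in snake_case.
    query_params = {"requestId": "12345"}
    orig_query_params = {"example_required_field": "", "request_id": "12345"}
    required_fields = [("example_required_field", "exampleRequiredField")]

    # Copy any required field back into the jsonified params if it went missing.
    for snake_case_name, camel_case_name in required_fields:
        if snake_case_name in orig_query_params:
            if camel_case_name not in query_params:
                query_params[camel_case_name] = orig_query_params[snake_case_name]

    print(query_params)  # {'requestId': '12345', 'exampleRequiredField': ''}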
@@ -270,30 +372,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthChecks", + "body": "health_check_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheck.to_json( - request.health_check_resource, + compute.HealthCheck(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthChecks".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionHealthCheckRequest.to_json( + compute.InsertRegionHealthCheckRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -304,10 +438,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionHealthChecksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.HealthCheckList: r"""Call the list method over HTTP. @@ -318,6 +454,9 @@ def list( RegionHealthChecks.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -328,30 +467,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthChecks".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthChecks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionHealthChecksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionHealthChecksRequest.to_json( + compute.ListRegionHealthChecksRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionHealthChecksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionHealthChecksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionHealthChecksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionHealthChecksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionHealthChecksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -363,10 +527,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchRegionHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -377,6 +543,9 @@ def patch( RegionHealthChecks.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
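For context on the params handling above, a hedged sketch of google.api_core.rest_helpers.flatten_query_params, which turns the jsonified query dict into the key/value pairs the underlying requests session expects. Input values are placeholders, and the exact output representation can vary across api-core versions.

    from google.api_core import rest_helpers

    flattened = rest_helpers.flatten_query_params(
        {
            "maxResults": 10,
            "returnPartialSuccess": True,
            "pageToken": "abc123",
        }
    )
    # Roughly: [("maxResults", 10), ("returnPartialSuccess", True), ("pageToken", "abc123")]
    print(flattened)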
@@ -400,33 +569,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}", + "body": "health_check_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchRegionHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheck.to_json( - request.health_check_resource, + compute.HealthCheck(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}".format( - host=self._host, - project=request.project, - region=request.region, - health_check=request.health_check, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRegionHealthCheckRequest.to_json( + compute.PatchRegionHealthCheckRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRegionHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -437,10 +636,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateRegionHealthCheckRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -451,6 +652,9 @@ def update( RegionHealthChecks.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -474,33 +678,63 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}", + "body": "health_check_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("health_check", "healthCheck"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.UpdateRegionHealthCheckRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.HealthCheck.to_json( - request.health_check_resource, + compute.HealthCheck(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}".format( - host=self._host, - project=request.project, - region=request.region, - health_check=request.health_check, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateRegionHealthCheckRequest.to_json( + compute.UpdateRegionHealthCheckRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateRegionHealthCheckRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -511,5 +745,44 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionHealthCheckRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetRegionHealthCheckRequest], compute.HealthCheck]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionHealthCheckRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListRegionHealthChecksRequest], compute.HealthCheckList]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchRegionHealthCheckRequest], compute.Operation]: + return self._patch + + @property + def update( + self, + ) -> Callable[[compute.UpdateRegionHealthCheckRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("RegionHealthChecksRestTransport",) diff --git a/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/google/cloud/compute_v1/services/region_instance_group_managers/client.py index a16cb48e7..247b98aad 100644 --- a/google/cloud/compute_v1/services/region_instance_group_managers/client.py +++ b/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
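The property block added above keeps the public transport surface unchanged while the private implementations gain retry/timeout parameters. A minimal hedged sketch of the pattern (a toy class, not the generated transport itself):

    from typing import Callable

    class ExampleRestTransport:
        """Sketch: the public name stays callable as transport.delete(request)."""

        def _delete(self, request, *, retry=None, timeout=None, metadata=()):
            # ... perform the HTTP call and return the parsed response ...
            return {"status": "DONE"}

        @property
        def delete(self) -> Callable:
            # Handing back the bound private method keeps existing call sites
            # working without knowing about the rename.
            return self._delete

    transport = ExampleRestTransport()
    operation = transport.delete(request={"health_check": "example"})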
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_instance_group_managers import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionInstanceGroupManagersTransport, DEFAULT_CLIENT_INFO @@ -267,8 +271,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -330,17 +341,20 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def abandon_instances( self, - request: compute.AbandonInstancesRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.AbandonInstancesRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_managers_abandon_instances_request_resource: compute.RegionInstanceGroupManagersAbandonInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -362,7 +376,7 @@ def abandon_instances( maximum of 1000 instances with this method per request. Args: - request (google.cloud.compute_v1.types.AbandonInstancesRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.AbandonInstancesRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.AbandonInstances. See the method description for details. 
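One behavioural consequence of dropping distutils.util.strtobool above: values such as "1" or "yes", which strtobool used to accept, now raise. A hedged sketch of the check in isolation, with a deliberately invalid value:

    import os

    # Hypothetical standalone reproduction of the stricter validation.
    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "yes"

    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    use_client_cert = value == "true"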
@@ -466,13 +480,15 @@ def abandon_instances( def apply_updates_to_instances( self, - request: compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_managers_apply_updates_request_resource: compute.RegionInstanceGroupManagersApplyUpdatesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -480,7 +496,7 @@ def apply_updates_to_instances( instance group. Args: - request (google.cloud.compute_v1.types.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.ApplyUpdatesToInstances. See the method description for details. @@ -590,13 +606,15 @@ def apply_updates_to_instances( def create_instances( self, - request: compute.CreateInstancesRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.CreateInstancesRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_managers_create_instances_request_resource: compute.RegionInstanceGroupManagersCreateInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -610,7 +628,7 @@ def create_instances( listmanagedinstances method. Args: - request (google.cloud.compute_v1.types.CreateInstancesRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.CreateInstancesRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.CreateInstances. See the method description for details. @@ -717,12 +735,12 @@ def create_instances( def delete( self, - request: compute.DeleteRegionInstanceGroupManagerRequest = None, + request: Union[compute.DeleteRegionInstanceGroupManagerRequest, dict] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -730,7 +748,7 @@ def delete( of the instances in that group. Args: - request (google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.Delete. See the method description for details. 
@@ -815,13 +833,15 @@ def delete( def delete_instances( self, - request: compute.DeleteInstancesRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.DeleteInstancesRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_managers_delete_instances_request_resource: compute.RegionInstanceGroupManagersDeleteInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -842,7 +862,7 @@ def delete_instances( instances with this method per request. Args: - request (google.cloud.compute_v1.types.DeleteInstancesRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.DeleteInstancesRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.DeleteInstances. See the method description for details. @@ -946,13 +966,15 @@ def delete_instances( def delete_per_instance_configs( self, - request: compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_manager_delete_instance_config_req_resource: compute.RegionInstanceGroupManagerDeleteInstanceConfigReq = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -960,7 +982,7 @@ def delete_per_instance_configs( instance group. Args: - request (google.cloud.compute_v1.types.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.DeletePerInstanceConfigs. See the method description for details. @@ -1070,12 +1092,12 @@ def delete_per_instance_configs( def get( self, - request: compute.GetRegionInstanceGroupManagerRequest = None, + request: Union[compute.GetRegionInstanceGroupManagerRequest, dict] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManager: @@ -1083,7 +1105,7 @@ def get( managed instance group. Args: - request (google.cloud.compute_v1.types.GetRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.GetRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.Get. See the method description for details. @@ -1162,12 +1184,12 @@ def get( def insert( self, - request: compute.InsertRegionInstanceGroupManagerRequest = None, + request: Union[compute.InsertRegionInstanceGroupManagerRequest, dict] = None, *, project: str = None, region: str = None, instance_group_manager_resource: compute.InstanceGroupManager = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1183,7 +1205,7 @@ def insert( instances. 
Args: - request (google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.Insert. See the method description for details. @@ -1268,11 +1290,11 @@ def insert( def list( self, - request: compute.ListRegionInstanceGroupManagersRequest = None, + request: Union[compute.ListRegionInstanceGroupManagersRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -1280,7 +1302,7 @@ def list( are contained within the specified region. Args: - request (google.cloud.compute_v1.types.ListRegionInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListRegionInstanceGroupManagersRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.List. See the method description for details. @@ -1352,12 +1374,14 @@ def list( def list_errors( self, - request: compute.ListErrorsRegionInstanceGroupManagersRequest = None, + request: Union[ + compute.ListErrorsRegionInstanceGroupManagersRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListErrorsPager: @@ -1366,7 +1390,7 @@ def list_errors( orderBy query parameters are not supported. Args: - request (google.cloud.compute_v1.types.ListErrorsRegionInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListErrorsRegionInstanceGroupManagersRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.ListErrors. See the method description for details. @@ -1449,12 +1473,14 @@ def list_errors( def list_managed_instances( self, - request: compute.ListManagedInstancesRegionInstanceGroupManagersRequest = None, + request: Union[ + compute.ListManagedInstancesRegionInstanceGroupManagersRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListManagedInstancesPager: @@ -1465,7 +1491,7 @@ def list_managed_instances( is not supported. Args: - request (google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.ListManagedInstances. See the method description for details. 
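Because the request parameters in these methods are now typed Union[..., dict], a plain dict can be passed instead of constructing the request message explicitly. A hedged sketch using the get call, with placeholder project, region, and resource names:

    from google.cloud import compute_v1

    client = compute_v1.RegionInstanceGroupManagersClient()

    # The dict stands in for compute.GetRegionInstanceGroupManagerRequest;
    # all values below are placeholders.
    manager = client.get(
        request={
            "project": "example-project",
            "region": "us-central1",
            "instance_group_manager": "example-mig",
        }
    )
    print(manager.name)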
@@ -1548,12 +1574,14 @@ def list_managed_instances( def list_per_instance_configs( self, - request: compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest = None, + request: Union[ + compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPerInstanceConfigsPager: @@ -1562,7 +1590,7 @@ def list_per_instance_configs( not supported. Args: - request (google.cloud.compute_v1.types.ListPerInstanceConfigsRegionInstanceGroupManagersRequest): + request (Union[google.cloud.compute_v1.types.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.ListPerInstanceConfigs. See the method description for details. @@ -1647,13 +1675,13 @@ def list_per_instance_configs( def patch( self, - request: compute.PatchRegionInstanceGroupManagerRequest = None, + request: Union[compute.PatchRegionInstanceGroupManagerRequest, dict] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, instance_group_manager_resource: compute.InstanceGroupManager = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1673,7 +1701,7 @@ def patch( in a MIG, see Updating instances in a MIG. Args: - request (google.cloud.compute_v1.types.PatchRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.PatchRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.Patch. See the method description for details. @@ -1769,13 +1797,15 @@ def patch( def patch_per_instance_configs( self, - request: compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_manager_patch_instance_config_req_resource: compute.RegionInstanceGroupManagerPatchInstanceConfigReq = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1785,7 +1815,7 @@ def patch_per_instance_configs( patch. Args: - request (google.cloud.compute_v1.types.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.PatchPerInstanceConfigs. See the method description for details. 
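The list-style methods above return pagers, whose element and page types are now annotated with Iterator elsewhere in this change. A hedged sketch of consuming one, with placeholder project and region values:

    from google.cloud import compute_v1

    client = compute_v1.RegionInstanceGroupManagersClient()

    # Iterating the pager yields individual InstanceGroupManager items across pages.
    for manager in client.list(project="example-project", region="us-central1"):
        print(manager.name)

    # The .pages property yields whole response pages instead.
    pager = client.list(project="example-project", region="us-central1")
    for page in pager.pages:
        print(len(page.items))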
@@ -1895,13 +1925,15 @@ def patch_per_instance_configs( def recreate_instances( self, - request: compute.RecreateInstancesRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.RecreateInstancesRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_managers_recreate_request_resource: compute.RegionInstanceGroupManagersRecreateRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1921,7 +1953,7 @@ def recreate_instances( method per request. Args: - request (google.cloud.compute_v1.types.RecreateInstancesRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.RecreateInstancesRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.RecreateInstances. See the method description for details. @@ -2024,13 +2056,13 @@ def recreate_instances( def resize( self, - request: compute.ResizeRegionInstanceGroupManagerRequest = None, + request: Union[compute.ResizeRegionInstanceGroupManagerRequest, dict] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, size: int = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2049,7 +2081,7 @@ def resize( or deleted. Args: - request (google.cloud.compute_v1.types.ResizeRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.ResizeRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.Resize. See the method description for details. @@ -2141,13 +2173,15 @@ def resize( def set_instance_template( self, - request: compute.SetInstanceTemplateRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_managers_set_template_request_resource: compute.RegionInstanceGroupManagersSetTemplateRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2156,7 +2190,7 @@ def set_instance_template( Existing instances are not affected. Args: - request (google.cloud.compute_v1.types.SetInstanceTemplateRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.SetInstanceTemplateRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.SetInstanceTemplate. See the method description for details. 
@@ -2261,13 +2295,15 @@ def set_instance_template( def set_target_pools( self, - request: compute.SetTargetPoolsRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.SetTargetPoolsRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_managers_set_target_pools_request_resource: compute.RegionInstanceGroupManagersSetTargetPoolsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2276,7 +2312,7 @@ def set_target_pools( group are not affected. Args: - request (google.cloud.compute_v1.types.SetTargetPoolsRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.SetTargetPoolsRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.SetTargetPools. See the method description for details. @@ -2380,13 +2416,15 @@ def set_target_pools( def update_per_instance_configs( self, - request: compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest = None, + request: Union[ + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, dict + ] = None, *, project: str = None, region: str = None, instance_group_manager: str = None, region_instance_group_manager_update_instance_config_req_resource: compute.RegionInstanceGroupManagerUpdateInstanceConfigReq = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -2396,7 +2434,7 @@ def update_per_instance_configs( patch. Args: - request (google.cloud.compute_v1.types.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest): + request (Union[google.cloud.compute_v1.types.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, dict]): The request object. A request message for RegionInstanceGroupManagers.UpdatePerInstanceConfigs. See the method description for details. @@ -2504,6 +2542,19 @@ def update_per_instance_configs( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_instance_group_managers/pagers.py b/google/cloud/compute_v1/services/region_instance_group_managers/pagers.py index df30d924d..3b53d2231 100644 --- a/google/cloud/compute_v1/services/region_instance_group_managers/pagers.py +++ b/google/cloud/compute_v1/services/region_instance_group_managers/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RegionInstanceGroupManagerList]: + def pages(self) -> Iterator[compute.RegionInstanceGroupManagerList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceGroupManager]: + def __iter__(self) -> Iterator[compute.InstanceGroupManager]: for page in self.pages: yield from page.items @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RegionInstanceGroupManagersListErrorsResponse]: + def pages(self) -> Iterator[compute.RegionInstanceGroupManagersListErrorsResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceManagedByIgmError]: + def __iter__(self) -> Iterator[compute.InstanceManagedByIgmError]: for page in self.pages: yield from page.items @@ -202,14 +202,14 @@ def __getattr__(self, name: str) -> Any: @property def pages( self, - ) -> Iterable[compute.RegionInstanceGroupManagersListInstancesResponse]: + ) -> Iterator[compute.RegionInstanceGroupManagersListInstancesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ManagedInstance]: + def __iter__(self) -> Iterator[compute.ManagedInstance]: for page in self.pages: yield from page.managed_instances @@ -270,14 +270,14 @@ def __getattr__(self, name: str) -> Any: @property def pages( self, - ) -> Iterable[compute.RegionInstanceGroupManagersListInstanceConfigsResp]: + ) -> Iterator[compute.RegionInstanceGroupManagersListInstanceConfigsResp]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.PerInstanceConfig]: + def __iter__(self) -> Iterator[compute.PerInstanceConfig]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_instance_group_managers/transports/base.py b/google/cloud/compute_v1/services/region_instance_group_managers/transports/base.py index 46ec6826c..10376d7e0 100644 --- a/google/cloud/compute_v1/services/region_instance_group_managers/transports/base.py +++ 
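The __enter__/__exit__ support added to the clients in this change (closing out just above) lets a client be used as a context manager that closes its transport on exit. A hedged sketch, keeping the warning about shared transports in mind; project and region are placeholders:

    from google.cloud import compute_v1

    # Exiting the block calls transport.close(), so only use this form when the
    # transport is not shared with other clients.
    with compute_v1.RegionInstanceGroupManagersClient() as client:
        for manager in client.list(project="example-project", region="us-central1"):
            print(manager.name)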
b/google/cloud/compute_v1/services/region_instance_group_managers/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionInstanceGroupManagersTransport(abc.ABC): """Abstract transport class for RegionInstanceGroupManagers.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -231,6 +195,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def abandon_instances( self, diff --git a/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py b/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py index 5c0a0a2a5..2e83efbab 100644 --- a/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py +++ b/google/cloud/compute_v1/services/region_instance_group_managers/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionInstanceGroupManagersTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionInstanceGroupManagersTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionInstanceGroupManagersRestTransport(RegionInstanceGroupManagersTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def abandon_instances( + def _abandon_instances( self, request: compute.AbandonInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the abandon instances method over HTTP. @@ -112,6 +139,9 @@ def abandon_instances( RegionInstanceGroupManagers.AbandonInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,36 +165,67 @@ def abandon_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/abandonInstances", + "body": "region_instance_group_managers_abandon_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.AbandonInstancesRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagersAbandonInstancesRequest.to_json( - request.region_instance_group_managers_abandon_instances_request_resource, + compute.RegionInstanceGroupManagersAbandonInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/abandonInstances".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AbandonInstancesRegionInstanceGroupManagerRequest.to_json( + compute.AbandonInstancesRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.AbandonInstancesRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -175,10 +236,12 @@ def abandon_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def apply_updates_to_instances( + def _apply_updates_to_instances( self, request: compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the apply updates to @@ -190,6 +253,9 @@ def apply_updates_to_instances( RegionInstanceGroupManagers.ApplyUpdatesToInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -213,31 +279,67 @@ def apply_updates_to_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances", + "body": "region_instance_group_managers_apply_updates_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagersApplyUpdatesRequest.to_json( - request.region_instance_group_managers_apply_updates_request_resource, + compute.RegionInstanceGroupManagersApplyUpdatesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest.to_json( + compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather 
than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -248,10 +350,12 @@ def apply_updates_to_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def create_instances( + def _create_instances( self, request: compute.CreateInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the create instances method over HTTP. @@ -262,6 +366,9 @@ def create_instances( RegionInstanceGroupManagers.CreateInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
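Every rewritten handler below leans on google.api_core.path_template.transcode to pick the matching HTTP rule and split the request dict into URI, verb, body, and leftover query params. A rough sketch of the shape it returns for the createInstances rule; all field values are made up for illustration:

from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/regions/{region}"
        "/instanceGroupManagers/{instance_group_manager}/createInstances",
        "body": "region_instance_group_managers_create_instances_request_resource",
    },
]

request_kwargs = {
    "project": "my-project",  # hypothetical values
    "region": "us-central1",
    "instance_group_manager": "my-mig",
    "request_id": "abc123",
    "region_instance_group_managers_create_instances_request_resource": {},
}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       == "post"
# transcoded["uri"]          == "/compute/v1/projects/my-project/regions/us-central1/..."
# transcoded["body"]         == {}   (the field named by the rule's "body" key)
# transcoded["query_params"] == {"request_id": "abc123"}   (whatever is left over)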
@@ -285,36 +392,67 @@ def create_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/createInstances", + "body": "region_instance_group_managers_create_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.CreateInstancesRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagersCreateInstancesRequest.to_json( - request.region_instance_group_managers_create_instances_request_resource, + compute.RegionInstanceGroupManagersCreateInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/createInstances".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.CreateInstancesRegionInstanceGroupManagerRequest.to_json( + compute.CreateInstancesRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.CreateInstancesRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -325,10 +463,12 @@ def create_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -339,6 +479,9 @@ def delete( RegionInstanceGroupManagers.Delete. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -362,25 +505,58 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionInstanceGroupManagerRequest.to_json( + compute.DeleteRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -390,10 +566,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_instances( + def _delete_instances( self, request: compute.DeleteInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete instances method over HTTP. @@ -404,6 +582,9 @@ def delete_instances( RegionInstanceGroupManagers.DeleteInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -427,36 +608,67 @@ def delete_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deleteInstances", + "body": "region_instance_group_managers_delete_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteInstancesRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagersDeleteInstancesRequest.to_json( - request.region_instance_group_managers_delete_instances_request_resource, + compute.RegionInstanceGroupManagersDeleteInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deleteInstances".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteInstancesRegionInstanceGroupManagerRequest.to_json( + compute.DeleteInstancesRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.DeleteInstancesRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -467,10 +679,12 @@ def delete_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete_per_instance_configs( + def _delete_per_instance_configs( self, request: compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete per instance @@ -482,6 +696,9 @@ def delete_per_instance_configs( RegionInstanceGroupManagers.DeletePerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -505,31 +722,67 @@ def delete_per_instance_configs( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs", + "body": "region_instance_group_manager_delete_instance_config_req_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagerDeleteInstanceConfigReq.to_json( - request.region_instance_group_manager_delete_instance_config_req_resource, + compute.RegionInstanceGroupManagerDeleteInstanceConfigReq( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest.to_json( + compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle 
nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -540,10 +793,12 @@ def delete_per_instance_configs( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroupManager: r"""Call the get method over HTTP. @@ -554,6 +809,9 @@ def get( RegionInstanceGroupManagers.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -571,23 +829,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionInstanceGroupManagerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionInstanceGroupManagerRequest.to_json( + compute.GetRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -599,10 +890,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -613,6 +906,9 @@ def insert( RegionInstanceGroupManagers.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -636,30 +932,64 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers", + "body": "instance_group_manager_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManager.to_json( - request.instance_group_manager_resource, + compute.InstanceGroupManager(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionInstanceGroupManagerRequest.to_json( + compute.InsertRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -670,10 +1000,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionInstanceGroupManagerList: r"""Call the list method over HTTP. @@ -684,6 +1016,9 @@ def list( RegionInstanceGroupManagers.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -694,33 +1029,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers".format( - host=self._host, project=request.project, region=request.region, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionInstanceGroupManagersRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionInstanceGroupManagersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionInstanceGroupManagersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionInstanceGroupManagersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListRegionInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionInstanceGroupManagersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionInstanceGroupManagersRequest.to_json( + compute.ListRegionInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in 
query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -732,10 +1089,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_errors( + def _list_errors( self, request: compute.ListErrorsRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionInstanceGroupManagersListErrorsResponse: r"""Call the list errors method over HTTP. @@ -746,6 +1105,9 @@ def list_errors( RegionInstanceGroupManagers.ListErrors. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -754,36 +1116,58 @@ def list_errors( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listErrors".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListErrorsRegionInstanceGroupManagersRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListErrorsRegionInstanceGroupManagersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListErrorsRegionInstanceGroupManagersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListErrorsRegionInstanceGroupManagersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListErrorsRegionInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listErrors", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListErrorsRegionInstanceGroupManagersRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the 
query params + query_params = json.loads( + compute.ListErrorsRegionInstanceGroupManagersRequest.to_json( + compute.ListErrorsRegionInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -795,10 +1179,12 @@ def list_errors( response.content, ignore_unknown_fields=True ) - def list_managed_instances( + def _list_managed_instances( self, request: compute.ListManagedInstancesRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionInstanceGroupManagersListInstancesResponse: r"""Call the list managed instances method over HTTP. @@ -809,6 +1195,9 @@ def list_managed_instances( RegionInstanceGroupManagers.ListManagedInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
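The recurring "Ensure required fields have values in query_params" loop exists because proto-plus to_json() omits fields still holding their default value, which could silently drop a required path or query parameter. A standalone sketch of that merge, with the function name and values invented for illustration:

def backfill_required_query_params(query_params, orig_query_params, required_fields):
    # Copy required params back from the transcoded request, keyed by their
    # camelCase wire names, but never overwrite a value to_json already kept.
    for snake_case_name, camel_case_name in required_fields:
        if snake_case_name in orig_query_params and camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]
    return query_params

merged = backfill_required_query_params(
    query_params={"requestId": "abc123"},
    orig_query_params={"project": "my-project", "region": "us-central1"},
    required_fields=[("project", "project"), ("region", "region")],
)
# merged == {"requestId": "abc123", "project": "my-project", "region": "us-central1"}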
@@ -817,48 +1206,58 @@ def list_managed_instances( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listManagedInstances".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.ListManagedInstancesRegionInstanceGroupManagersRequest.filter - in request - ): - query_params["filter"] = request.filter - if ( - compute.ListManagedInstancesRegionInstanceGroupManagersRequest.max_results - in request - ): - query_params["maxResults"] = request.max_results - if ( - compute.ListManagedInstancesRegionInstanceGroupManagersRequest.order_by - in request - ): - query_params["orderBy"] = request.order_by - if ( - compute.ListManagedInstancesRegionInstanceGroupManagersRequest.page_token - in request - ): - query_params["pageToken"] = request.page_token - if ( - compute.ListManagedInstancesRegionInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listManagedInstances", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListManagedInstancesRegionInstanceGroupManagersRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListManagedInstancesRegionInstanceGroupManagersRequest.to_json( + compute.ListManagedInstancesRegionInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
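The send step now dispatches on the verb chosen by transcode (getattr(self._session, method)) and passes nested query params through google.api_core.rest_helpers.flatten_query_params, which yields repeated key/value pairs. A self-contained sketch using a plain requests.Session with a made-up host and values; the generated transport uses an AuthorizedSession instead:

import requests
from google.api_core import rest_helpers

session = requests.Session()
method = "get"  # e.g. transcoded_request["method"]
uri = "/compute/v1/projects/my-project/regions/us-central1/instanceGroupManagers"

# flatten_query_params turns a (possibly nested) dict into a list of
# (key, value) pairs that requests encodes as repeated query parameters.
params = rest_helpers.flatten_query_params({"maxResults": 10, "pageToken": "tok"})

response = getattr(session, method)(  # session.get(...) in this case
    "https://compute.googleapis.com{uri}".format(uri=uri),  # host is illustrative
    params=params,
    timeout=30,
)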
@@ -870,10 +1269,12 @@ def list_managed_instances( response.content, ignore_unknown_fields=True ) - def list_per_instance_configs( + def _list_per_instance_configs( self, request: compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionInstanceGroupManagersListInstanceConfigsResp: r"""Call the list per instance configs method over HTTP. @@ -884,6 +1285,9 @@ def list_per_instance_configs( RegionInstanceGroupManagers.ListPerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -892,48 +1296,58 @@ def list_per_instance_configs( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.filter - in request - ): - query_params["filter"] = request.filter - if ( - compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.max_results - in request - ): - query_params["maxResults"] = request.max_results - if ( - compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.order_by - in request - ): - query_params["orderBy"] = request.order_by - if ( - compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.page_token - in request - ): - query_params["pageToken"] = request.page_token - if ( - compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest.to_json( + compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -945,10 +1359,12 @@ def list_per_instance_configs( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -959,6 +1375,9 @@ def patch( RegionInstanceGroupManagers.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -982,33 +1401,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}", + "body": "instance_group_manager_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchRegionInstanceGroupManagerRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceGroupManager.to_json( - request.instance_group_manager_resource, + compute.InstanceGroupManager(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRegionInstanceGroupManagerRequest.to_json( + compute.PatchRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRegionInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1019,10 +1468,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def patch_per_instance_configs( + def _patch_per_instance_configs( self, request: compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch per instance @@ -1034,6 +1485,9 @@ def patch_per_instance_configs( RegionInstanceGroupManagers.PatchPerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1057,36 +1511,67 @@ def patch_per_instance_configs( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs", + "body": "region_instance_group_manager_patch_instance_config_req_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagerPatchInstanceConfigReq.to_json( - request.region_instance_group_manager_patch_instance_config_req_resource, + compute.RegionInstanceGroupManagerPatchInstanceConfigReq( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.to_json( + compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly 
rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1097,10 +1582,12 @@ def patch_per_instance_configs( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def recreate_instances( + def _recreate_instances( self, request: compute.RecreateInstancesRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the recreate instances method over HTTP. @@ -1111,6 +1598,9 @@ def recreate_instances( RegionInstanceGroupManagers.RecreateInstances. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
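Each handler ends with an error check that the diff context elides ("In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception subclass"). A hedged sketch of one plausible shape for that tail, assuming google.api_core.exceptions.from_http_response together with the Operation.from_json call visible above; the generated code may differ:

from google.api_core import exceptions as core_exceptions
from google.cloud.compute_v1.types import compute

def finish_rest_call(response):
    # Assumed: map 4xx/5xx responses to NotFound, PermissionDenied, etc.
    if response.status_code >= 400:
        raise core_exceptions.from_http_response(response)
    # Decode the JSON payload into the proto-plus Operation message.
    return compute.Operation.from_json(response.content, ignore_unknown_fields=True)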
@@ -1134,36 +1624,67 @@ def recreate_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/recreateInstances", + "body": "region_instance_group_managers_recreate_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.RecreateInstancesRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagersRecreateRequest.to_json( - request.region_instance_group_managers_recreate_request_resource, + compute.RegionInstanceGroupManagersRecreateRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/recreateInstances".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RecreateInstancesRegionInstanceGroupManagerRequest.to_json( + compute.RecreateInstancesRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.RecreateInstancesRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1174,10 +1695,12 @@ def recreate_instances( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def resize( + def _resize( self, request: compute.ResizeRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the resize method over HTTP. @@ -1188,6 +1711,9 @@ def resize( RegionInstanceGroupManagers.Resize. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1211,26 +1737,59 @@ def resize( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ("size", "size"), + ] + + request_kwargs = compute.ResizeRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ResizeRegionInstanceGroupManagerRequest.to_json( + compute.ResizeRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ResizeRegionInstanceGroupManagerRequest.request_id in request: - query_params["requestId"] = request.request_id - query_params["size"] = request.size + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -1240,10 +1799,12 @@ def resize( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_instance_template( + def _set_instance_template( self, request: compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set instance template method over HTTP. @@ -1254,6 +1815,9 @@ def set_instance_template( RegionInstanceGroupManagers.SetInstanceTemplate. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1277,36 +1841,67 @@ def set_instance_template( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate", + "body": "region_instance_group_managers_set_template_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagersSetTemplateRequest.to_json( - request.region_instance_group_managers_set_template_request_resource, + compute.RegionInstanceGroupManagersSetTemplateRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.to_json( + compute.SetInstanceTemplateRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.SetInstanceTemplateRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1317,10 +1912,12 @@ def set_instance_template( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_target_pools( + def _set_target_pools( self, request: compute.SetTargetPoolsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set target pools method over HTTP. @@ -1331,6 +1928,9 @@ def set_target_pools( RegionInstanceGroupManagers.SetTargetPools. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -1354,36 +1954,67 @@ def set_target_pools( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setTargetPools", + "body": "region_instance_group_managers_set_target_pools_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.SetTargetPoolsRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagersSetTargetPoolsRequest.to_json( - request.region_instance_group_managers_set_target_pools_request_resource, + compute.RegionInstanceGroupManagersSetTargetPoolsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setTargetPools".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetTargetPoolsRegionInstanceGroupManagerRequest.to_json( + compute.SetTargetPoolsRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - 
query_params = {} - if ( - compute.SetTargetPoolsRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1394,10 +2025,12 @@ def set_target_pools( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update_per_instance_configs( + def _update_per_instance_configs( self, request: compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update per instance @@ -1409,6 +2042,9 @@ def update_per_instance_configs( RegionInstanceGroupManagers.UpdatePerInstanceConfigs. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
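The rewritten methods above all share the same plumbing: build http_options, run the dict form of the request through path_template.transcode, then send the transcoded result with the session. A minimal standalone sketch of what that call produces for a body-less method; the project, region, and instance group manager names below are made-up sample values, not part of this change.

# Sketch only: hypothetical identifiers, mirroring the pattern used in the hunks above.
from google.api_core import path_template, rest_helpers

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize",
    },
]

# Hypothetical dict form of a ResizeRegionInstanceGroupManagerRequest.
request_kwargs = {
    "project": "my-project",
    "region": "us-central1",
    "instance_group_manager": "my-mig",
    "size": 3,
    "request_id": "abc-123",
}

transcoded = path_template.transcode(http_options, **request_kwargs)
print(transcoded["method"])        # post
print(transcoded["uri"])           # /compute/v1/projects/my-project/regions/us-central1/instanceGroupManagers/my-mig/resize
print(transcoded["query_params"])  # fields not consumed by the path template: size, request_id
print(rest_helpers.flatten_query_params(transcoded["query_params"]))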
@@ -1432,36 +2068,67 @@ def update_per_instance_configs( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs", + "body": "region_instance_group_manager_update_instance_config_req_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group_manager", "instanceGroupManager"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupManagerUpdateInstanceConfigReq.to_json( - request.region_instance_group_manager_update_instance_config_req_resource, + compute.RegionInstanceGroupManagerUpdateInstanceConfigReq( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs".format( - host=self._host, - project=request.project, - region=request.region, - instance_group_manager=request.instance_group_manager, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.to_json( + compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -1472,5 +2139,161 @@ def update_per_instance_configs( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def abandon_instances( + self, + ) -> Callable[ + [compute.AbandonInstancesRegionInstanceGroupManagerRequest], compute.Operation + ]: + return self._abandon_instances + + @property + def apply_updates_to_instances( + self, + ) -> Callable[ + [compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest], + compute.Operation, + ]: + return self._apply_updates_to_instances + + @property + def create_instances( + self, + ) -> Callable[ + [compute.CreateInstancesRegionInstanceGroupManagerRequest], compute.Operation + ]: + return self._create_instances + + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionInstanceGroupManagerRequest], compute.Operation]: + return self._delete + + @property + def delete_instances( + self, + ) -> Callable[ + [compute.DeleteInstancesRegionInstanceGroupManagerRequest], compute.Operation + ]: + return self._delete_instances + + @property + def delete_per_instance_configs( + self, + ) -> Callable[ + [compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest], + compute.Operation, + ]: + return self._delete_per_instance_configs + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionInstanceGroupManagerRequest], compute.InstanceGroupManager + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionInstanceGroupManagerRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagerList, + ]: + return self._list + + @property + def list_errors( + self, + ) -> Callable[ + [compute.ListErrorsRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagersListErrorsResponse, + ]: + return self._list_errors + + @property + def list_managed_instances( + self, + ) -> Callable[ + [compute.ListManagedInstancesRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagersListInstancesResponse, + ]: + return self._list_managed_instances + + @property + def list_per_instance_configs( + self, + ) -> Callable[ + [compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest], + compute.RegionInstanceGroupManagersListInstanceConfigsResp, + ]: + return self._list_per_instance_configs + + @property + def patch( + self, + ) -> Callable[[compute.PatchRegionInstanceGroupManagerRequest], compute.Operation]: + return self._patch + + @property + def patch_per_instance_configs( + self, + ) -> Callable[ + [compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest], + compute.Operation, + 
]: + return self._patch_per_instance_configs + + @property + def recreate_instances( + self, + ) -> Callable[ + [compute.RecreateInstancesRegionInstanceGroupManagerRequest], compute.Operation + ]: + return self._recreate_instances + + @property + def resize( + self, + ) -> Callable[[compute.ResizeRegionInstanceGroupManagerRequest], compute.Operation]: + return self._resize + + @property + def set_instance_template( + self, + ) -> Callable[ + [compute.SetInstanceTemplateRegionInstanceGroupManagerRequest], + compute.Operation, + ]: + return self._set_instance_template + + @property + def set_target_pools( + self, + ) -> Callable[ + [compute.SetTargetPoolsRegionInstanceGroupManagerRequest], compute.Operation + ]: + return self._set_target_pools + + @property + def update_per_instance_configs( + self, + ) -> Callable[ + [compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest], + compute.Operation, + ]: + return self._update_per_instance_configs + + def close(self): + self._session.close() + __all__ = ("RegionInstanceGroupManagersRestTransport",) diff --git a/google/cloud/compute_v1/services/region_instance_groups/client.py b/google/cloud/compute_v1/services/region_instance_groups/client.py index 13b4caca1..1cf88d51e 100644 --- a/google/cloud/compute_v1/services/region_instance_groups/client.py +++ b/google/cloud/compute_v1/services/region_instance_groups/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_instance_groups import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionInstanceGroupsTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,23 +339,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def get( self, - request: compute.GetRegionInstanceGroupRequest = None, + request: Union[compute.GetRegionInstanceGroupRequest, dict] = None, *, project: str = None, region: str = None, instance_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroup: r"""Returns the specified instance group resource. Args: - request (google.cloud.compute_v1.types.GetRegionInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.GetRegionInstanceGroupRequest, dict]): The request object. A request message for RegionInstanceGroups.Get. See the method description for details. @@ -428,11 +440,11 @@ def get( def list( self, - request: compute.ListRegionInstanceGroupsRequest = None, + request: Union[compute.ListRegionInstanceGroupsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -440,7 +452,7 @@ def list( contained within the specified region. Args: - request (google.cloud.compute_v1.types.ListRegionInstanceGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListRegionInstanceGroupsRequest, dict]): The request object. A request message for RegionInstanceGroups.List. See the method description for details. @@ -512,13 +524,13 @@ def list( def list_instances( self, - request: compute.ListInstancesRegionInstanceGroupsRequest = None, + request: Union[compute.ListInstancesRegionInstanceGroupsRequest, dict] = None, *, project: str = None, region: str = None, instance_group: str = None, region_instance_groups_list_instances_request_resource: compute.RegionInstanceGroupsListInstancesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListInstancesPager: @@ -529,7 +541,7 @@ def list_instances( The orderBy query parameter is not supported. Args: - request (google.cloud.compute_v1.types.ListInstancesRegionInstanceGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListInstancesRegionInstanceGroupsRequest, dict]): The request object. A request message for RegionInstanceGroups.ListInstances. See the method description for details. 
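With the signatures widened to Union[<Request>, dict], callers can pass either a typed request object or a plain dict with the same keys. A short usage sketch, assuming hypothetical project and region values and application default credentials:

from google.cloud import compute_v1

# Hypothetical project/region; requires default credentials to actually run.
client = compute_v1.RegionInstanceGroupsClient()

# A typed request object...
request = compute_v1.ListRegionInstanceGroupsRequest(
    project="my-project", region="us-central1"
)
for group in client.list(request=request):
    print(group.name)

# ...or, equivalently after this change, a plain dict with the same fields.
for group in client.list(request={"project": "my-project", "region": "us-central1"}):
    print(group.name)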
@@ -624,13 +636,13 @@ def list_instances( def set_named_ports( self, - request: compute.SetNamedPortsRegionInstanceGroupRequest = None, + request: Union[compute.SetNamedPortsRegionInstanceGroupRequest, dict] = None, *, project: str = None, region: str = None, instance_group: str = None, region_instance_groups_set_named_ports_request_resource: compute.RegionInstanceGroupsSetNamedPortsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -638,7 +650,7 @@ def set_named_ports( instance group. Args: - request (google.cloud.compute_v1.types.SetNamedPortsRegionInstanceGroupRequest): + request (Union[google.cloud.compute_v1.types.SetNamedPortsRegionInstanceGroupRequest, dict]): The request object. A request message for RegionInstanceGroups.SetNamedPorts. See the method description for details. @@ -737,6 +749,19 @@ def set_named_ports( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_instance_groups/pagers.py b/google/cloud/compute_v1/services/region_instance_groups/pagers.py index 544b821d9..b7af23a64 100644 --- a/google/cloud/compute_v1/services/region_instance_groups/pagers.py +++ b/google/cloud/compute_v1/services/region_instance_groups/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RegionInstanceGroupList]: + def pages(self) -> Iterator[compute.RegionInstanceGroupList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceGroup]: + def __iter__(self) -> Iterator[compute.InstanceGroup]: for page in self.pages: yield from page.items @@ -136,14 +136,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RegionInstanceGroupsListInstances]: + def pages(self) -> Iterator[compute.RegionInstanceGroupsListInstances]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.InstanceWithNamedPorts]: + def __iter__(self) -> Iterator[compute.InstanceWithNamedPorts]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_instance_groups/transports/base.py b/google/cloud/compute_v1/services/region_instance_groups/transports/base.py index 0b146950e..4ab93b791 100644 --- a/google/cloud/compute_v1/services/region_instance_groups/transports/base.py +++ 
b/google/cloud/compute_v1/services/region_instance_groups/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionInstanceGroupsTransport(abc.ABC): """Abstract transport class for RegionInstanceGroups.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def get( self, diff --git a/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py b/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py index b55c8b823..e15e353d0 100644 --- a/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py +++ b/google/cloud/compute_v1/services/region_instance_groups/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionInstanceGroupsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionInstanceGroupsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionInstanceGroupsRestTransport(RegionInstanceGroupsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def get( + def _get( self, request: compute.GetRegionInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.InstanceGroup: r"""Call the get method over HTTP. @@ -112,6 +139,9 @@ def get( RegionInstanceGroups.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -134,23 +164,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}".format( - host=self._host, - project=request.project, - region=request.region, - instance_group=request.instance_group, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionInstanceGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionInstanceGroupRequest.to_json( + compute.GetRegionInstanceGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -162,10 +225,12 @@ def get( response.content, ignore_unknown_fields=True ) - def list( + def _list( self, request: compute.ListRegionInstanceGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionInstanceGroupList: r"""Call the list method over HTTP. @@ -176,6 +241,9 @@ def list( RegionInstanceGroups.List. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -186,30 +254,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroups".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionInstanceGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionInstanceGroupsRequest.to_json( + compute.ListRegionInstanceGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionInstanceGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionInstanceGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionInstanceGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionInstanceGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionInstanceGroupsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -221,10 +314,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_instances( + def _list_instances( self, request: compute.ListInstancesRegionInstanceGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionInstanceGroupsListInstances: r"""Call the list instances method over HTTP. @@ -235,6 +330,9 @@ def list_instances( RegionInstanceGroups.ListInstances. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -243,44 +341,67 @@ def list_instances( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/listInstances", + "body": "region_instance_groups_list_instances_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListInstancesRegionInstanceGroupsRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupsListInstancesRequest.to_json( - request.region_instance_groups_list_instances_request_resource, + compute.RegionInstanceGroupsListInstancesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/listInstances".format( - host=self._host, - project=request.project, - region=request.region, - instance_group=request.instance_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListInstancesRegionInstanceGroupsRequest.to_json( + compute.ListInstancesRegionInstanceGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListInstancesRegionInstanceGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListInstancesRegionInstanceGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListInstancesRegionInstanceGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListInstancesRegionInstanceGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListInstancesRegionInstanceGroupsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -293,10 +414,12 @@ def list_instances( response.content, ignore_unknown_fields=True ) - def set_named_ports( + def _set_named_ports( self, request: compute.SetNamedPortsRegionInstanceGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set named ports method over HTTP. @@ -307,6 +430,9 @@ def set_named_ports( RegionInstanceGroups.SetNamedPorts. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -330,33 +456,67 @@ def set_named_ports( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/setNamedPorts", + "body": "region_instance_groups_set_named_ports_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("instance_group", "instanceGroup"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.SetNamedPortsRegionInstanceGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionInstanceGroupsSetNamedPortsRequest.to_json( - request.region_instance_groups_set_named_ports_request_resource, + compute.RegionInstanceGroupsSetNamedPortsRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/setNamedPorts".format( - host=self._host, - project=request.project, - region=request.region, - instance_group=request.instance_group, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetNamedPortsRegionInstanceGroupRequest.to_json( + compute.SetNamedPortsRegionInstanceGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetNamedPortsRegionInstanceGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -367,5 +527,37 @@ def set_named_ports( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def get( + self, + ) -> Callable[[compute.GetRegionInstanceGroupRequest], compute.InstanceGroup]: + return self._get + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionInstanceGroupsRequest], compute.RegionInstanceGroupList + ]: + return self._list + + @property + def list_instances( + self, + ) -> Callable[ + [compute.ListInstancesRegionInstanceGroupsRequest], + compute.RegionInstanceGroupsListInstances, + ]: + return self._list_instances + + @property + def set_named_ports( + self, + ) -> Callable[[compute.SetNamedPortsRegionInstanceGroupRequest], compute.Operation]: + return self._set_named_ports + + def close(self): + self._session.close() + __all__ = ("RegionInstanceGroupsRestTransport",) diff --git a/google/cloud/compute_v1/services/region_instances/client.py b/google/cloud/compute_v1/services/region_instances/client.py index f0a626248..38add6068 100644 --- a/google/cloud/compute_v1/services/region_instances/client.py +++ b/google/cloud/compute_v1/services/region_instances/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.types import compute from .transports.base import RegionInstancesTransport, DEFAULT_CLIENT_INFO from .transports.rest import RegionInstancesRestTransport @@ -262,8 +266,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -325,16 +336,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def bulk_insert( self, - request: compute.BulkInsertRegionInstanceRequest = None, + request: Union[compute.BulkInsertRegionInstanceRequest, dict] = None, *, project: str = None, region: str = None, bulk_insert_instance_resource_resource: compute.BulkInsertInstanceResource = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -342,7 +354,7 @@ def bulk_insert( specifies the number of instances to create. Args: - request (google.cloud.compute_v1.types.BulkInsertRegionInstanceRequest): + request (Union[google.cloud.compute_v1.types.BulkInsertRegionInstanceRequest, dict]): The request object. A request message for RegionInstances.BulkInsert. See the method description for details. @@ -427,6 +439,19 @@ def bulk_insert( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_instances/transports/base.py b/google/cloud/compute_v1/services/region_instances/transports/base.py index 3cb020b59..6eba2270c 100644 --- a/google/cloud/compute_v1/services/region_instances/transports/base.py +++ b/google/cloud/compute_v1/services/region_instances/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionInstancesTransport(abc.ABC): """Abstract transport class for RegionInstances.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { @@ -163,6 +127,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def bulk_insert( self, diff --git a/google/cloud/compute_v1/services/region_instances/transports/rest.py b/google/cloud/compute_v1/services/region_instances/transports/rest.py index a86bd9875..6b9ddd6d0 100644 --- a/google/cloud/compute_v1/services/region_instances/transports/rest.py +++ b/google/cloud/compute_v1/services/region_instances/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionInstancesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionInstancesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionInstancesRestTransport(RegionInstancesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
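The bulk_insert hunks below follow the same pattern but also carry a request body: the field named under "body" in http_options is split out by transcode, path variables fill the URI, and the remainder become query params. A minimal sketch with made-up values, not tied to any real project:

from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/regions/{region}/instances/bulkInsert",
        "body": "bulk_insert_instance_resource_resource",
    },
]

# Hypothetical dict form of a BulkInsertRegionInstanceRequest.
request_kwargs = {
    "project": "my-project",
    "region": "us-central1",
    "request_id": "abc-123",
    "bulk_insert_instance_resource_resource": {"count": 3, "name_pattern": "vm-####"},
}

transcoded = path_template.transcode(http_options, **request_kwargs)
print(transcoded["uri"])           # /compute/v1/projects/my-project/regions/us-central1/instances/bulkInsert
print(transcoded["body"])          # the popped-out resource dict that later becomes the JSON body
print(transcoded["query_params"])  # whatever is left over, e.g. request_id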
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def bulk_insert( + def _bulk_insert( self, request: compute.BulkInsertRegionInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the bulk insert method over HTTP. @@ -112,6 +139,9 @@ def bulk_insert( RegionInstances.BulkInsert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,30 +165,62 @@ def bulk_insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instances/bulkInsert", + "body": "bulk_insert_instance_resource_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.BulkInsertRegionInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.BulkInsertInstanceResource.to_json( - request.bulk_insert_instance_resource_resource, + compute.BulkInsertInstanceResource(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/instances/bulkInsert".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.BulkInsertRegionInstanceRequest.to_json( + compute.BulkInsertRegionInstanceRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.BulkInsertRegionInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -169,5 +231,14 @@ def bulk_insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def bulk_insert( + self, + ) -> Callable[[compute.BulkInsertRegionInstanceRequest], compute.Operation]: + return self._bulk_insert + + def close(self): + self._session.close() + __all__ = ("RegionInstancesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py b/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py index e9db050b8..e77f996f8 100644 --- a/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py +++ b/google/cloud/compute_v1/services/region_network_endpoint_groups/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_network_endpoint_groups import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO @@ -267,8 +271,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -330,16 +341,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionNetworkEndpointGroupRequest = None, + request: Union[compute.DeleteRegionNetworkEndpointGroupRequest, dict] = None, *, project: str = None, region: str = None, network_endpoint_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -348,7 +360,7 @@ def delete( backend of a backend service. Args: - request (google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest, dict]): The request object. A request message for RegionNetworkEndpointGroups.Delete. See the method description for details. @@ -435,12 +447,12 @@ def delete( def get( self, - request: compute.GetRegionNetworkEndpointGroupRequest = None, + request: Union[compute.GetRegionNetworkEndpointGroupRequest, dict] = None, *, project: str = None, region: str = None, network_endpoint_group: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroup: @@ -449,7 +461,7 @@ def get( list() request. Args: - request (google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest, dict]): The request object. A request message for RegionNetworkEndpointGroups.Get. See the method description for details. @@ -531,12 +543,12 @@ def get( def insert( self, - request: compute.InsertRegionNetworkEndpointGroupRequest = None, + request: Union[compute.InsertRegionNetworkEndpointGroupRequest, dict] = None, *, project: str = None, region: str = None, network_endpoint_group_resource: compute.NetworkEndpointGroup = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -545,7 +557,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest, dict]): The request object. A request message for RegionNetworkEndpointGroups.Insert. See the method description for details. @@ -631,11 +643,11 @@ def insert( def list( self, - request: compute.ListRegionNetworkEndpointGroupsRequest = None, + request: Union[compute.ListRegionNetworkEndpointGroupsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -644,7 +656,7 @@ def list( region. 
Args: - request (google.cloud.compute_v1.types.ListRegionNetworkEndpointGroupsRequest): + request (Union[google.cloud.compute_v1.types.ListRegionNetworkEndpointGroupsRequest, dict]): The request object. A request message for RegionNetworkEndpointGroups.List. See the method description for details. @@ -713,6 +725,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py b/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py index 8781f8db1..c3c7dfad7 100644 --- a/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py +++ b/google/cloud/compute_v1/services/region_network_endpoint_groups/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NetworkEndpointGroupList]: + def pages(self) -> Iterator[compute.NetworkEndpointGroupList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NetworkEndpointGroup]: + def __iter__(self) -> Iterator[compute.NetworkEndpointGroup]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py b/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py index 29e5bef3d..1b537b38c 100644 --- a/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py +++ b/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except 
AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionNetworkEndpointGroupsTransport(abc.ABC): """Abstract transport class for RegionNetworkEndpointGroups.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py b/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py index 73bf7b577..b81c63675 100644 --- a/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py +++ b/google/cloud/compute_v1/services/region_network_endpoint_groups/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionNetworkEndpointGroupsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionNetworkEndpointGroupsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionNetworkEndpointGroupsRestTransport(RegionNetworkEndpointGroupsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionNetworkEndpointGroups.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,25 +165,58 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}".format( - host=self._host, - project=request.project, - region=request.region, - network_endpoint_group=request.network_endpoint_group, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionNetworkEndpointGroupRequest.to_json( + compute.DeleteRegionNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionNetworkEndpointGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
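Instead of formatting the URL by hand, the transport now declares http_options and lets google.api_core.path_template.transcode pick the matching rule, fill the URI template from the request fields, and return everything else as query_params (plus a body key when the rule names one). A minimal sketch of what the helper returns, using placeholder values:

from google.api_core import path_template

http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}",
    },
]

# Placeholder field values, shaped like Request.to_dict(request).
request_kwargs = {
    "project": "my-project",
    "region": "us-central1",
    "network_endpoint_group": "my-neg",
    "request_id": "placeholder-request-id",
}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       -> "delete"
# transcoded["uri"]          -> "/compute/v1/projects/my-project/regions/us-central1/networkEndpointGroups/my-neg"
# transcoded["query_params"] -> {"request_id": "placeholder-request-id"}   (fields not bound in the URI)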
@@ -163,10 +226,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroup: r"""Call the get method over HTTP. @@ -177,6 +242,9 @@ def get( RegionNetworkEndpointGroups.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -196,23 +264,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}".format( - host=self._host, - project=request.project, - region=request.region, - network_endpoint_group=request.network_endpoint_group, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("network_endpoint_group", "networkEndpointGroup"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionNetworkEndpointGroupRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionNetworkEndpointGroupRequest.to_json( + compute.GetRegionNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -224,10 +325,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionNetworkEndpointGroupRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -238,6 +341,9 @@ def insert( RegionNetworkEndpointGroups.Insert. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -261,30 +367,64 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups", + "body": "network_endpoint_group_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionNetworkEndpointGroupRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NetworkEndpointGroup.to_json( - request.network_endpoint_group_resource, + compute.NetworkEndpointGroup(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionNetworkEndpointGroupRequest.to_json( + compute.InsertRegionNetworkEndpointGroupRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionNetworkEndpointGroupRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -295,10 +435,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionNetworkEndpointGroupsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NetworkEndpointGroupList: r"""Call the list method over HTTP. @@ -309,6 +451,9 @@ def list( RegionNetworkEndpointGroups.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -317,33 +462,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionNetworkEndpointGroupsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionNetworkEndpointGroupsRequest.to_json( + compute.ListRegionNetworkEndpointGroupsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionNetworkEndpointGroupsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionNetworkEndpointGroupsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionNetworkEndpointGroupsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionNetworkEndpointGroupsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListRegionNetworkEndpointGroupsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
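The leftover query parameters are handed to requests through google.api_core.rest_helpers.flatten_query_params rather than being assembled field by field. Roughly, the helper turns the (possibly nested) dict into a flat list of key/value pairs for the params= argument; the values below are made up:

from google.api_core import rest_helpers

query_params = {
    "maxResults": 50,
    "pageToken": "placeholder-token",
    "filter": "name eq my-endpoint",
}

flat = rest_helpers.flatten_query_params(query_params)
# Roughly: [("maxResults", 50), ("pageToken", "placeholder-token"), ("filter", "name eq my-endpoint")]
# Nested message fields would come out as dotted keys, repeated fields as repeated pairs.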
@@ -355,5 +522,37 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionNetworkEndpointGroupRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionNetworkEndpointGroupRequest], compute.NetworkEndpointGroup + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionNetworkEndpointGroupRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionNetworkEndpointGroupsRequest], + compute.NetworkEndpointGroupList, + ]: + return self._list + + def close(self): + self._session.close() + __all__ = ("RegionNetworkEndpointGroupsRestTransport",) diff --git a/google/cloud/compute_v1/services/region_notification_endpoints/client.py b/google/cloud/compute_v1/services/region_notification_endpoints/client.py index adf6de44d..8cf5e0187 100644 --- a/google/cloud/compute_v1/services/region_notification_endpoints/client.py +++ b/google/cloud/compute_v1/services/region_notification_endpoints/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_notification_endpoints import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionNotificationEndpointsTransport, DEFAULT_CLIENT_INFO @@ -267,8 +271,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -330,16 +341,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionNotificationEndpointRequest = None, + request: Union[compute.DeleteRegionNotificationEndpointRequest, dict] = None, *, project: str = None, region: str = None, notification_endpoint: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -347,7 +359,7 @@ def delete( given region Args: - request (google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest, dict]): The request object. A request message for RegionNotificationEndpoints.Delete. See the method description for details. @@ -432,12 +444,12 @@ def delete( def get( self, - request: compute.GetRegionNotificationEndpointRequest = None, + request: Union[compute.GetRegionNotificationEndpointRequest, dict] = None, *, project: str = None, region: str = None, notification_endpoint: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NotificationEndpoint: @@ -445,7 +457,7 @@ def get( in the given region. Args: - request (google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest): + request (Union[google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest, dict]): The request object. A request message for RegionNotificationEndpoints.Get. See the method description for details. @@ -522,12 +534,12 @@ def get( def insert( self, - request: compute.InsertRegionNotificationEndpointRequest = None, + request: Union[compute.InsertRegionNotificationEndpointRequest, dict] = None, *, project: str = None, region: str = None, notification_endpoint_resource: compute.NotificationEndpoint = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -536,7 +548,7 @@ def insert( are included in the request. Args: - request (google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest, dict]): The request object. A request message for RegionNotificationEndpoints.Insert. See the method description for details. 
@@ -619,11 +631,11 @@ def insert( def list( self, - request: compute.ListRegionNotificationEndpointsRequest = None, + request: Union[compute.ListRegionNotificationEndpointsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -631,7 +643,7 @@ def list( given region. Args: - request (google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest): + request (Union[google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest, dict]): The request object. A request message for RegionNotificationEndpoints.List. See the method description for details. @@ -699,6 +711,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_notification_endpoints/pagers.py b/google/cloud/compute_v1/services/region_notification_endpoints/pagers.py index 231e421a7..31a75ef7a 100644 --- a/google/cloud/compute_v1/services/region_notification_endpoints/pagers.py +++ b/google/cloud/compute_v1/services/region_notification_endpoints/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.NotificationEndpointList]: + def pages(self) -> Iterator[compute.NotificationEndpointList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.NotificationEndpoint]: + def __iter__(self) -> Iterator[compute.NotificationEndpoint]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_notification_endpoints/transports/base.py b/google/cloud/compute_v1/services/region_notification_endpoints/transports/base.py index 20f7be9fc..5c262011c 100644 --- a/google/cloud/compute_v1/services/region_notification_endpoints/transports/base.py +++ b/google/cloud/compute_v1/services/region_notification_endpoints/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 
import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionNotificationEndpointsTransport(abc.ABC): """Abstract transport class for RegionNotificationEndpoints.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py b/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py index b89785bad..54ccf6e27 100644 --- a/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py +++ b/google/cloud/compute_v1/services/region_notification_endpoints/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionNotificationEndpointsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionNotificationEndpointsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionNotificationEndpointsRestTransport(RegionNotificationEndpointsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionNotificationEndpointRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionNotificationEndpoints.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,25 +165,58 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}".format( - host=self._host, - project=request.project, - region=request.region, - notification_endpoint=request.notification_endpoint, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("notification_endpoint", "notificationEndpoint"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionNotificationEndpointRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionNotificationEndpointRequest.to_json( + compute.DeleteRegionNotificationEndpointRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionNotificationEndpointRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -163,10 +226,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionNotificationEndpointRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NotificationEndpoint: r"""Call the get method over HTTP. @@ -177,6 +242,9 @@ def get( RegionNotificationEndpoints.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -192,23 +260,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}".format( - host=self._host, - project=request.project, - region=request.region, - notification_endpoint=request.notification_endpoint, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("notification_endpoint", "notificationEndpoint"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionNotificationEndpointRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionNotificationEndpointRequest.to_json( + compute.GetRegionNotificationEndpointRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -220,10 +321,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionNotificationEndpointRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -234,6 +337,9 @@ def insert( RegionNotificationEndpoints.Insert. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -257,30 +363,64 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/notificationEndpoints", + "body": "notification_endpoint_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionNotificationEndpointRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.NotificationEndpoint.to_json( - request.notification_endpoint_resource, + compute.NotificationEndpoint(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/notificationEndpoints".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionNotificationEndpointRequest.to_json( + compute.InsertRegionNotificationEndpointRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionNotificationEndpointRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -291,10 +431,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionNotificationEndpointsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.NotificationEndpointList: r"""Call the list method over HTTP. @@ -305,6 +447,9 @@ def list( RegionNotificationEndpoints.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -313,33 +458,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/notificationEndpoints".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/notificationEndpoints", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionNotificationEndpointsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionNotificationEndpointsRequest.to_json( + compute.ListRegionNotificationEndpointsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionNotificationEndpointsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionNotificationEndpointsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionNotificationEndpointsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionNotificationEndpointsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListRegionNotificationEndpointsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
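The recurring required-fields loop compensates for a serialization quirk: with including_default_value_fields=False, a field that happens to hold its proto3 default is omitted from the JSON, so any required field still present in the transcoded query_params is copied back in. A small illustration of the dropping behaviour, with placeholder values (filter is just a convenient string field for the demo, not one of the required fields):

from google.cloud.compute_v1.types import compute

req = compute.ListRegionNotificationEndpointsRequest(
    project="my-project", region="us-central1", filter=""
)
as_json = compute.ListRegionNotificationEndpointsRequest.to_json(
    req, including_default_value_fields=False
)
# "filter" is absent from as_json because "" is the proto3 default for strings --
# the same mechanism that could drop a required field before the loop above restores it.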
@@ -351,5 +518,37 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionNotificationEndpointRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionNotificationEndpointRequest], compute.NotificationEndpoint + ]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionNotificationEndpointRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionNotificationEndpointsRequest], + compute.NotificationEndpointList, + ]: + return self._list + + def close(self): + self._session.close() + __all__ = ("RegionNotificationEndpointsRestTransport",) diff --git a/google/cloud/compute_v1/services/region_operations/client.py b/google/cloud/compute_v1/services/region_operations/client.py index 8cb2b305c..d9c3880e3 100644 --- a/google/cloud/compute_v1/services/region_operations/client.py +++ b/google/cloud/compute_v1/services/region_operations/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_operations import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionOperationsTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,16 +337,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionOperationRequest = None, + request: Union[compute.DeleteRegionOperationRequest, dict] = None, *, project: str = None, region: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteRegionOperationResponse: @@ -343,7 +355,7 @@ def delete( resource. Args: - request (google.cloud.compute_v1.types.DeleteRegionOperationRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionOperationRequest, dict]): The request object. A request message for RegionOperations.Delete. See the method description for details. @@ -414,12 +426,12 @@ def delete( def get( self, - request: compute.GetRegionOperationRequest = None, + request: Union[compute.GetRegionOperationRequest, dict] = None, *, project: str = None, region: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -427,7 +439,7 @@ def get( resource. Args: - request (google.cloud.compute_v1.types.GetRegionOperationRequest): + request (Union[google.cloud.compute_v1.types.GetRegionOperationRequest, dict]): The request object. A request message for RegionOperations.Get. See the method description for details. @@ -510,11 +522,11 @@ def get( def list( self, - request: compute.ListRegionOperationsRequest = None, + request: Union[compute.ListRegionOperationsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -522,7 +534,7 @@ def list( within the specified region. Args: - request (google.cloud.compute_v1.types.ListRegionOperationsRequest): + request (Union[google.cloud.compute_v1.types.ListRegionOperationsRequest, dict]): The request object. A request message for RegionOperations.List. See the method description for details. @@ -592,12 +604,12 @@ def list( def wait( self, - request: compute.WaitRegionOperationRequest = None, + request: Union[compute.WaitRegionOperationRequest, dict] = None, *, project: str = None, region: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -616,7 +628,7 @@ def wait( ``DONE``. Args: - request (google.cloud.compute_v1.types.WaitRegionOperationRequest): + request (Union[google.cloud.compute_v1.types.WaitRegionOperationRequest, dict]): The request object. A request message for RegionOperations.Wait. See the method description for details. 
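[Reviewer note, not part of the patch] A small usage sketch of the relaxed `Union[..., dict]` request typing above, together with the context-manager support added just below; the project/region/operation names are placeholders and default credentials are assumed:

# Illustrative only: callers may now pass a plain dict; the client converts it
# to the corresponding proto-plus request (here WaitRegionOperationRequest).
from google.cloud import compute_v1

with compute_v1.RegionOperationsClient() as client:
    operation = client.wait(
        request={
            "project": "my-project",
            "region": "us-central1",
            "operation": "operation-1234567890",
        }
    )
    print(operation.status)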
@@ -697,6 +709,19 @@ def wait( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_operations/pagers.py b/google/cloud/compute_v1/services/region_operations/pagers.py index 15966eabd..87816b2de 100644 --- a/google/cloud/compute_v1/services/region_operations/pagers.py +++ b/google/cloud/compute_v1/services/region_operations/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.OperationList]: + def pages(self) -> Iterator[compute.OperationList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Operation]: + def __iter__(self) -> Iterator[compute.Operation]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_operations/transports/base.py b/google/cloud/compute_v1/services/region_operations/transports/base.py index 494a3bc85..30deaa14e 100644 --- a/google/cloud/compute_v1/services/region_operations/transports/base.py +++ b/google/cloud/compute_v1/services/region_operations/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionOperationsTransport(abc.ABC): """Abstract transport class for RegionOperations.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = 
self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_operations/transports/rest.py b/google/cloud/compute_v1/services/region_operations/transports/rest.py index cbfbbafb1..0b863c793 100644 --- a/google/cloud/compute_v1/services/region_operations/transports/rest.py +++ b/google/cloud/compute_v1/services/region_operations/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionOperationsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionOperationsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionOperationsRestTransport(RegionOperationsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteRegionOperationResponse: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionOperations.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -123,23 +153,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/operations/{operation}".format( - host=self._host, - project=request.project, - region=request.region, - operation=request.operation, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.DeleteRegionOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionOperationRequest.to_json( + compute.DeleteRegionOperationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -151,10 +214,12 @@ def delete( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetRegionOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the get method over HTTP. @@ -165,6 +230,9 @@ def get( RegionOperations.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
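[Reviewer note, not part of the patch] Why the required-fields loop above is needed: `to_json(..., including_default_value_fields=False)` omits fields still holding their proto3 default, so a required field left at its default would otherwise be missing from the serialized query params. A quick check, using an assumed empty `project` value:

# Illustrative only: fields at their default value are omitted from the JSON.
import json
from google.cloud.compute_v1.types import compute

req = compute.ListRegionOperationsRequest(project="", region="us-central1")
as_json = json.loads(
    compute.ListRegionOperationsRequest.to_json(
        req, including_default_value_fields=False, use_integers_for_enums=False
    )
)
assert "project" not in as_json      # "" is the proto3 default for strings
assert as_json["region"] == "us-central1"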
@@ -188,23 +256,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/operations/{operation}".format( - host=self._host, - project=request.project, - region=request.region, - operation=request.operation, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionOperationRequest.to_json( + compute.GetRegionOperationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -214,10 +313,12 @@ def get( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionOperationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.OperationList: r"""Call the list method over HTTP. @@ -228,6 +329,9 @@ def list( RegionOperations.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -238,30 +342,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/operations".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/operations", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionOperationsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionOperationsRequest.to_json( + compute.ListRegionOperationsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionOperationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionOperationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionOperationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionOperationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionOperationsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -273,10 +400,12 @@ def list( response.content, ignore_unknown_fields=True ) - def wait( + def _wait( self, request: compute.WaitRegionOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the wait method over HTTP. @@ -287,6 +416,9 @@ def wait( RegionOperations.Wait. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
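[Reviewer note, not part of the patch] The hard-coded `self._session.get/.post/.delete` calls are replaced by dispatching on the verb chosen by transcoding. A tiny sketch of that dispatch, using a plain `requests.Session` in place of the AuthorizedSession:

# Illustrative only: getattr resolves the HTTP verb chosen by transcode()
# (e.g. "get" for List, "delete" for Delete) to the bound session method.
import requests

session = requests.Session()
method = "get"  # e.g. transcoded_request["method"] for ListRegionOperations
send = getattr(session, method)
assert send == session.get
# send(url, timeout=..., headers=..., params=...) then issues the request.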
@@ -310,23 +442,54 @@ def wait( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/operations/{operation}/wait".format( - host=self._host, - project=request.project, - region=request.region, - operation=request.operation, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/operations/{operation}/wait", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.WaitRegionOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.WaitRegionOperationRequest.to_json( + compute.WaitRegionOperationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -336,5 +499,30 @@ def wait( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteRegionOperationRequest], compute.DeleteRegionOperationResponse + ]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetRegionOperationRequest], compute.Operation]: + return self._get + + @property + def list( + self, + ) -> Callable[[compute.ListRegionOperationsRequest], compute.OperationList]: + return self._list + + @property + def wait(self) -> Callable[[compute.WaitRegionOperationRequest], compute.Operation]: + return self._wait + + def close(self): + self._session.close() + __all__ = ("RegionOperationsRestTransport",) diff --git a/google/cloud/compute_v1/services/region_ssl_certificates/client.py b/google/cloud/compute_v1/services/region_ssl_certificates/client.py index 36c3e1b57..ea54ac14e 100644 --- a/google/cloud/compute_v1/services/region_ssl_certificates/client.py +++ b/google/cloud/compute_v1/services/region_ssl_certificates/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_ssl_certificates import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionSslCertificatesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,16 +339,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionSslCertificateRequest = None, + request: Union[compute.DeleteRegionSslCertificateRequest, dict] = None, *, project: str = None, region: str = None, ssl_certificate: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -345,7 +357,7 @@ def delete( region. Args: - request (google.cloud.compute_v1.types.DeleteRegionSslCertificateRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionSslCertificateRequest, dict]): The request object. A request message for RegionSslCertificates.Delete. See the method description for details. @@ -430,12 +442,12 @@ def delete( def get( self, - request: compute.GetRegionSslCertificateRequest = None, + request: Union[compute.GetRegionSslCertificateRequest, dict] = None, *, project: str = None, region: str = None, ssl_certificate: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslCertificate: @@ -444,7 +456,7 @@ def get( certificates by making a list() request. 
Args: - request (google.cloud.compute_v1.types.GetRegionSslCertificateRequest): + request (Union[google.cloud.compute_v1.types.GetRegionSslCertificateRequest, dict]): The request object. A request message for RegionSslCertificates.Get. See the method description for details. @@ -530,12 +542,12 @@ def get( def insert( self, - request: compute.InsertRegionSslCertificateRequest = None, + request: Union[compute.InsertRegionSslCertificateRequest, dict] = None, *, project: str = None, region: str = None, ssl_certificate_resource: compute.SslCertificate = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -544,7 +556,7 @@ def insert( request Args: - request (google.cloud.compute_v1.types.InsertRegionSslCertificateRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionSslCertificateRequest, dict]): The request object. A request message for RegionSslCertificates.Insert. See the method description for details. @@ -627,11 +639,11 @@ def insert( def list( self, - request: compute.ListRegionSslCertificatesRequest = None, + request: Union[compute.ListRegionSslCertificatesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -640,7 +652,7 @@ def list( region. Args: - request (google.cloud.compute_v1.types.ListRegionSslCertificatesRequest): + request (Union[google.cloud.compute_v1.types.ListRegionSslCertificatesRequest, dict]): The request object. A request message for RegionSslCertificates.List. See the method description for details. @@ -710,6 +722,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_ssl_certificates/pagers.py b/google/cloud/compute_v1/services/region_ssl_certificates/pagers.py index b7d38b271..aea62835e 100644 --- a/google/cloud/compute_v1/services/region_ssl_certificates/pagers.py +++ b/google/cloud/compute_v1/services/region_ssl_certificates/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SslCertificateList]: + def pages(self) -> Iterator[compute.SslCertificateList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.SslCertificate]: + def __iter__(self) -> Iterator[compute.SslCertificate]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_ssl_certificates/transports/base.py b/google/cloud/compute_v1/services/region_ssl_certificates/transports/base.py index 83d627233..bc5fb5c42 100644 --- a/google/cloud/compute_v1/services/region_ssl_certificates/transports/base.py +++ b/google/cloud/compute_v1/services/region_ssl_certificates/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionSslCertificatesTransport(abc.ABC): """Abstract transport class for RegionSslCertificates.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. 
+ # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py b/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py index a6cf530b3..176535817 100644 --- a/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py +++ b/google/cloud/compute_v1/services/region_ssl_certificates/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionSslCertificatesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionSslCertificatesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionSslCertificatesRestTransport(RegionSslCertificatesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionSslCertificateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionSslCertificates.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,25 +165,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}".format( - host=self._host, - project=request.project, - region=request.region, - ssl_certificate=request.ssl_certificate, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("ssl_certificate", "sslCertificate"), + ] + + request_kwargs = compute.DeleteRegionSslCertificateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionSslCertificateRequest.to_json( + compute.DeleteRegionSslCertificateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionSslCertificateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +224,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionSslCertificateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslCertificate: r"""Call the get method over HTTP. @@ -177,6 +240,9 @@ def get( RegionSslCertificates.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
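[Reviewer note, not part of the patch] The removed per-field checks (e.g. the old `requestId` handling in `delete`) are covered by serializing the transcoded query params through `to_json()`, which already emits lowerCamelCase keys. A quick illustration with an assumed request id:

# Illustrative only: proto-plus JSON serialization camelCases field names,
# so request_id surfaces as "requestId" without a manual mapping.
import json
from google.cloud.compute_v1.types import compute

params = compute.DeleteRegionSslCertificateRequest(request_id="a1b2c3d4")
as_json = json.loads(
    compute.DeleteRegionSslCertificateRequest.to_json(
        params, including_default_value_fields=False, use_integers_for_enums=False
    )
)
assert as_json == {"requestId": "a1b2c3d4"}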
@@ -201,23 +267,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}".format( - host=self._host, - project=request.project, - region=request.region, - ssl_certificate=request.ssl_certificate, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("ssl_certificate", "sslCertificate"), + ] + + request_kwargs = compute.GetRegionSslCertificateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionSslCertificateRequest.to_json( + compute.GetRegionSslCertificateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -229,10 +328,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionSslCertificateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -243,6 +344,9 @@ def insert( RegionSslCertificates.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -266,30 +370,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/sslCertificates", + "body": "ssl_certificate_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionSslCertificateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SslCertificate.to_json( - request.ssl_certificate_resource, + compute.SslCertificate(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/sslCertificates".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionSslCertificateRequest.to_json( + compute.InsertRegionSslCertificateRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionSslCertificateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -300,10 +436,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionSslCertificatesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslCertificateList: r"""Call the list method over HTTP. @@ -314,6 +452,9 @@ def list( RegionSslCertificates.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
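[Reviewer note, not part of the patch] For methods that declare a `"body"` option (Insert above), the transcoded body is re-wrapped in its message type and serialized as the HTTP payload. A small sketch with a made-up certificate resource:

# Illustrative only: mirrors how _insert() builds its JSON body above.
from google.cloud.compute_v1.types import compute

ssl_certificate_resource = {
    "name": "example-cert",  # placeholder values
    "certificate": "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----",
}
body = compute.SslCertificate.to_json(
    compute.SslCertificate(ssl_certificate_resource),
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
print(body)  # JSON string, e.g. '{"name": "example-cert", "certificate": "..."}'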
@@ -324,30 +465,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/sslCertificates".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/sslCertificates", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionSslCertificatesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionSslCertificatesRequest.to_json( + compute.ListRegionSslCertificatesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionSslCertificatesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionSslCertificatesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionSslCertificatesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionSslCertificatesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionSslCertificatesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -359,5 +525,34 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionSslCertificateRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetRegionSslCertificateRequest], compute.SslCertificate]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionSslCertificateRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionSslCertificatesRequest], compute.SslCertificateList + ]: + return self._list + + def close(self): + self._session.close() + __all__ = ("RegionSslCertificatesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_target_http_proxies/client.py b/google/cloud/compute_v1/services/region_target_http_proxies/client.py index ba7cfc837..8989c3672 100644 --- a/google/cloud/compute_v1/services/region_target_http_proxies/client.py +++ b/google/cloud/compute_v1/services/region_target_http_proxies/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_target_http_proxies import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionTargetHttpProxiesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,23 +339,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionTargetHttpProxyRequest = None, + request: Union[compute.DeleteRegionTargetHttpProxyRequest, dict] = None, *, project: str = None, region: str = None, target_http_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified TargetHttpProxy resource. Args: - request (google.cloud.compute_v1.types.DeleteRegionTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionTargetHttpProxyRequest, dict]): The request object. A request message for RegionTargetHttpProxies.Delete. See the method description for details. @@ -429,12 +441,12 @@ def delete( def get( self, - request: compute.GetRegionTargetHttpProxyRequest = None, + request: Union[compute.GetRegionTargetHttpProxyRequest, dict] = None, *, project: str = None, region: str = None, target_http_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpProxy: @@ -443,7 +455,7 @@ def get( proxies by making a list() request. Args: - request (google.cloud.compute_v1.types.GetRegionTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.GetRegionTargetHttpProxyRequest, dict]): The request object. A request message for RegionTargetHttpProxies.Get. See the method description for details. @@ -526,12 +538,12 @@ def get( def insert( self, - request: compute.InsertRegionTargetHttpProxyRequest = None, + request: Union[compute.InsertRegionTargetHttpProxyRequest, dict] = None, *, project: str = None, region: str = None, target_http_proxy_resource: compute.TargetHttpProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -540,7 +552,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertRegionTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionTargetHttpProxyRequest, dict]): The request object. A request message for RegionTargetHttpProxies.Insert. See the method description for details. @@ -623,11 +635,11 @@ def insert( def list( self, - request: compute.ListRegionTargetHttpProxiesRequest = None, + request: Union[compute.ListRegionTargetHttpProxiesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -636,7 +648,7 @@ def list( region. 
Args: - request (google.cloud.compute_v1.types.ListRegionTargetHttpProxiesRequest): + request (Union[google.cloud.compute_v1.types.ListRegionTargetHttpProxiesRequest, dict]): The request object. A request message for RegionTargetHttpProxies.List. See the method description for details. @@ -707,20 +719,20 @@ def list( def set_url_map( self, - request: compute.SetUrlMapRegionTargetHttpProxyRequest = None, + request: Union[compute.SetUrlMapRegionTargetHttpProxyRequest, dict] = None, *, project: str = None, region: str = None, target_http_proxy: str = None, url_map_reference_resource: compute.UrlMapReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the URL map for TargetHttpProxy. Args: - request (google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpProxyRequest, dict]): The request object. A request message for RegionTargetHttpProxies.SetUrlMap. See the method description for details. @@ -812,6 +824,19 @@ def set_url_map( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_target_http_proxies/pagers.py b/google/cloud/compute_v1/services/region_target_http_proxies/pagers.py index 3e9c44a7f..3b9fe2e3b 100644 --- a/google/cloud/compute_v1/services/region_target_http_proxies/pagers.py +++ b/google/cloud/compute_v1/services/region_target_http_proxies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetHttpProxyList]: + def pages(self) -> Iterator[compute.TargetHttpProxyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetHttpProxy]: + def __iter__(self) -> Iterator[compute.TargetHttpProxy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_target_http_proxies/transports/base.py b/google/cloud/compute_v1/services/region_target_http_proxies/transports/base.py index 0509d04f7..674efa0bd 100644 --- a/google/cloud/compute_v1/services/region_target_http_proxies/transports/base.py +++ b/google/cloud/compute_v1/services/region_target_http_proxies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core 
import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionTargetHttpProxiesTransport(abc.ABC): """Abstract transport class for RegionTargetHttpProxies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py b/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py index 23e226df8..565d24962 100644 --- a/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py +++ b/google/cloud/compute_v1/services/region_target_http_proxies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionTargetHttpProxiesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionTargetHttpProxiesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionTargetHttpProxiesRestTransport(RegionTargetHttpProxiesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionTargetHttpProxies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,25 +165,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}".format( - host=self._host, - project=request.project, - region=request.region, - target_http_proxy=request.target_http_proxy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_http_proxy", "targetHttpProxy"), + ] + + request_kwargs = compute.DeleteRegionTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionTargetHttpProxyRequest.to_json( + compute.DeleteRegionTargetHttpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionTargetHttpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
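The handwritten URL formatting is gone: each method now declares `http_options` and feeds the request dict through `path_template.transcode`, which binds path variables into the URI and leaves the remaining fields as query params (and, where declared, a body). A rough sketch of that call using the delete route above; the field values are placeholders and the expected output is approximate:

    from google.api_core import path_template

    http_options = [
        {
            "method": "delete",
            "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}",
        },
    ]
    request_kwargs = {
        "project": "my-project",
        "region": "us-central1",
        "target_http_proxy": "my-proxy",
        "request_id": "unique-request-id",
    }

    transcoded = path_template.transcode(http_options, **request_kwargs)
    # Expected (roughly):
    #   transcoded["method"]       == "delete"
    #   transcoded["uri"]          == "/compute/v1/projects/my-project/regions/us-central1/targetHttpProxies/my-proxy"
    #   transcoded["query_params"] == {"request_id": "unique-request-id"}
    print(transcoded)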
@@ -163,10 +224,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpProxy: r"""Call the get method over HTTP. @@ -177,6 +240,9 @@ def get( RegionTargetHttpProxies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -198,23 +264,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}".format( - host=self._host, - project=request.project, - region=request.region, - target_http_proxy=request.target_http_proxy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_http_proxy", "targetHttpProxy"), + ] + + request_kwargs = compute.GetRegionTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionTargetHttpProxyRequest.to_json( + compute.GetRegionTargetHttpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -226,10 +325,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -240,6 +341,9 @@ def insert( RegionTargetHttpProxies.Insert. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -263,30 +367,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpProxies", + "body": "target_http_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpProxy.to_json( - request.target_http_proxy_resource, + compute.TargetHttpProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpProxies".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionTargetHttpProxyRequest.to_json( + compute.InsertRegionTargetHttpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionTargetHttpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -297,10 +433,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionTargetHttpProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpProxyList: r"""Call the list method over HTTP. @@ -311,6 +449,9 @@ def list( RegionTargetHttpProxies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -319,30 +460,55 @@ def list( A list of TargetHttpProxy resources. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpProxies".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionTargetHttpProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionTargetHttpProxiesRequest.to_json( + compute.ListRegionTargetHttpProxiesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionTargetHttpProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionTargetHttpProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionTargetHttpProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionTargetHttpProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionTargetHttpProxiesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -354,10 +520,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_url_map( + def _set_url_map( self, request: compute.SetUrlMapRegionTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set url map method over HTTP. @@ -368,6 +536,9 @@ def set_url_map( RegionTargetHttpProxies.SetUrlMap. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -391,33 +562,63 @@ def set_url_map( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}/setUrlMap", + "body": "url_map_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_http_proxy", "targetHttpProxy"), + ] + + request_kwargs = compute.SetUrlMapRegionTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMapReference.to_json( - request.url_map_reference_resource, + compute.UrlMapReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}/setUrlMap".format( - host=self._host, - project=request.project, - region=request.region, - target_http_proxy=request.target_http_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetUrlMapRegionTargetHttpProxyRequest.to_json( + compute.SetUrlMapRegionTargetHttpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetUrlMapRegionTargetHttpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -428,5 +629,40 @@ def set_url_map( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionTargetHttpProxyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetRegionTargetHttpProxyRequest], compute.TargetHttpProxy]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionTargetHttpProxyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionTargetHttpProxiesRequest], compute.TargetHttpProxyList + ]: + return self._list + + @property + def set_url_map( + self, + ) -> Callable[[compute.SetUrlMapRegionTargetHttpProxyRequest], compute.Operation]: + return self._set_url_map + + def close(self): + self._session.close() + __all__ = ("RegionTargetHttpProxiesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_target_https_proxies/client.py b/google/cloud/compute_v1/services/region_target_https_proxies/client.py index dbb922c85..97424d473 100644 --- a/google/cloud/compute_v1/services/region_target_https_proxies/client.py +++ b/google/cloud/compute_v1/services/region_target_https_proxies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
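The next client module repeats the pattern already applied above: `distutils.util.strtobool` (gone along with distutils in Python 3.12) is replaced by an explicit, stricter check that `GOOGLE_API_USE_CLIENT_CERTIFICATE` is exactly "true" or "false" rather than any of strtobool's accepted spellings. A standalone sketch of that check:

    import os


    def _use_client_cert() -> bool:
        value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        if value not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be"
                " either `true` or `false`"
            )
        return value == "true"


    os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
    print(_use_client_cert())  # True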
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_target_https_proxies import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionTargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,23 +339,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionTargetHttpsProxyRequest = None, + request: Union[compute.DeleteRegionTargetHttpsProxyRequest, dict] = None, *, project: str = None, region: str = None, target_https_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified TargetHttpsProxy resource. Args: - request (google.cloud.compute_v1.types.DeleteRegionTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionTargetHttpsProxyRequest, dict]): The request object. A request message for RegionTargetHttpsProxies.Delete. See the method description for details. @@ -429,12 +441,12 @@ def delete( def get( self, - request: compute.GetRegionTargetHttpsProxyRequest = None, + request: Union[compute.GetRegionTargetHttpsProxyRequest, dict] = None, *, project: str = None, region: str = None, target_https_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpsProxy: @@ -443,7 +455,7 @@ def get( HTTP proxies by making a list() request. 
Args: - request (google.cloud.compute_v1.types.GetRegionTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.GetRegionTargetHttpsProxyRequest, dict]): The request object. A request message for RegionTargetHttpsProxies.Get. See the method description for details. @@ -525,12 +537,12 @@ def get( def insert( self, - request: compute.InsertRegionTargetHttpsProxyRequest = None, + request: Union[compute.InsertRegionTargetHttpsProxyRequest, dict] = None, *, project: str = None, region: str = None, target_https_proxy_resource: compute.TargetHttpsProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -539,7 +551,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertRegionTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionTargetHttpsProxyRequest, dict]): The request object. A request message for RegionTargetHttpsProxies.Insert. See the method description for details. @@ -622,11 +634,11 @@ def insert( def list( self, - request: compute.ListRegionTargetHttpsProxiesRequest = None, + request: Union[compute.ListRegionTargetHttpsProxiesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -635,7 +647,7 @@ def list( region. Args: - request (google.cloud.compute_v1.types.ListRegionTargetHttpsProxiesRequest): + request (Union[google.cloud.compute_v1.types.ListRegionTargetHttpsProxiesRequest, dict]): The request object. A request message for RegionTargetHttpsProxies.List. See the method description for details. @@ -707,20 +719,22 @@ def list( def set_ssl_certificates( self, - request: compute.SetSslCertificatesRegionTargetHttpsProxyRequest = None, + request: Union[ + compute.SetSslCertificatesRegionTargetHttpsProxyRequest, dict + ] = None, *, project: str = None, region: str = None, target_https_proxy: str = None, region_target_https_proxies_set_ssl_certificates_request_resource: compute.RegionTargetHttpsProxiesSetSslCertificatesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Replaces SslCertificates for TargetHttpsProxy. Args: - request (google.cloud.compute_v1.types.SetSslCertificatesRegionTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.SetSslCertificatesRegionTargetHttpsProxyRequest, dict]): The request object. A request message for RegionTargetHttpsProxies.SetSslCertificates. See the method description for details. @@ -826,20 +840,20 @@ def set_ssl_certificates( def set_url_map( self, - request: compute.SetUrlMapRegionTargetHttpsProxyRequest = None, + request: Union[compute.SetUrlMapRegionTargetHttpsProxyRequest, dict] = None, *, project: str = None, region: str = None, target_https_proxy: str = None, url_map_reference_resource: compute.UrlMapReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the URL map for TargetHttpsProxy. 
Args: - request (google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.SetUrlMapRegionTargetHttpsProxyRequest, dict]): The request object. A request message for RegionTargetHttpsProxies.SetUrlMap. See the method description for details. @@ -931,6 +945,19 @@ def set_url_map( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_target_https_proxies/pagers.py b/google/cloud/compute_v1/services/region_target_https_proxies/pagers.py index 6d15ff5c2..5fdec9e70 100644 --- a/google/cloud/compute_v1/services/region_target_https_proxies/pagers.py +++ b/google/cloud/compute_v1/services/region_target_https_proxies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetHttpsProxyList]: + def pages(self) -> Iterator[compute.TargetHttpsProxyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetHttpsProxy]: + def __iter__(self) -> Iterator[compute.TargetHttpsProxy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_target_https_proxies/transports/base.py b/google/cloud/compute_v1/services/region_target_https_proxies/transports/base.py index 02124d004..a14d6e74c 100644 --- a/google/cloud/compute_v1/services/region_target_https_proxies/transports/base.py +++ b/google/cloud/compute_v1/services/region_target_https_proxies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try 
pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionTargetHttpsProxiesTransport(abc.ABC): """Abstract transport class for RegionTargetHttpsProxies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -180,6 +144,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py b/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py index f1dd47b53..a956b5992 100644 --- a/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py +++ b/google/cloud/compute_v1/services/region_target_https_proxies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionTargetHttpsProxiesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionTargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionTargetHttpsProxiesRestTransport(RegionTargetHttpsProxiesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( RegionTargetHttpsProxies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,25 +165,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}".format( - host=self._host, - project=request.project, - region=request.region, - target_https_proxy=request.target_https_proxy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.DeleteRegionTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionTargetHttpsProxyRequest.to_json( + compute.DeleteRegionTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -163,10 +224,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpsProxy: r"""Call the get method over HTTP. @@ -177,6 +240,9 @@ def get( RegionTargetHttpsProxies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,23 +263,56 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}".format( - host=self._host, - project=request.project, - region=request.region, - target_https_proxy=request.target_https_proxy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.GetRegionTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionTargetHttpsProxyRequest.to_json( + compute.GetRegionTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +324,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -239,6 +340,9 @@ def insert( RegionTargetHttpsProxies.Insert. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -262,30 +366,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies", + "body": "target_https_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpsProxy.to_json( - request.target_https_proxy_resource, + compute.TargetHttpsProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionTargetHttpsProxyRequest.to_json( + compute.InsertRegionTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -296,10 +432,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionTargetHttpsProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpsProxyList: r"""Call the list method over HTTP. @@ -310,6 +448,9 @@ def list( RegionTargetHttpsProxies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -320,33 +461,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionTargetHttpsProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionTargetHttpsProxiesRequest.to_json( + compute.ListRegionTargetHttpsProxiesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionTargetHttpsProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionTargetHttpsProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionTargetHttpsProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionTargetHttpsProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListRegionTargetHttpsProxiesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -358,10 +521,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_ssl_certificates( + def _set_ssl_certificates( self, request: compute.SetSslCertificatesRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set ssl certificates method over HTTP. @@ -372,6 +537,9 @@ def set_ssl_certificates( RegionTargetHttpsProxies.SetSslCertificates. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
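Each handler follows its `to_json` call with a required-fields loop because `including_default_value_fields=False` drops any field still at its proto default; the loop copies such a field back from the transcoded query params. A toy, dict-only illustration of that copy-back (the values are hypothetical and only the mechanics match the generated code):

```python
# Toy sketch of the required-field recovery loop used by the handlers above.
required_fields = [
    # (snake_case_name, camel_case_name)
    ("project", "project"),
    ("region", "region"),
]

orig_query_params = {"project": "my-project", "region": ""}  # "" is a proto default
query_params = {"project": "my-project"}  # "region" was dropped during JSON serialization

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"project": "my-project", "region": ""}
```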
+ timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -395,36 +563,67 @@ def set_ssl_certificates( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setSslCertificates", + "body": "region_target_https_proxies_set_ssl_certificates_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.SetSslCertificatesRegionTargetHttpsProxyRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionTargetHttpsProxiesSetSslCertificatesRequest.to_json( - request.region_target_https_proxies_set_ssl_certificates_request_resource, + compute.RegionTargetHttpsProxiesSetSslCertificatesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setSslCertificates".format( - host=self._host, - project=request.project, - region=request.region, - target_https_proxy=request.target_https_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetSslCertificatesRegionTargetHttpsProxyRequest.to_json( + compute.SetSslCertificatesRegionTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.SetSslCertificatesRegionTargetHttpsProxyRequest.request_id - in request - ): - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -435,10 +634,12 @@ def set_ssl_certificates( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_url_map( + def _set_url_map( self, request: compute.SetUrlMapRegionTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set url map method over HTTP. @@ -449,6 +650,9 @@ def set_url_map( RegionTargetHttpsProxies.SetUrlMap. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -472,33 +676,63 @@ def set_url_map( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setUrlMap", + "body": "url_map_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.SetUrlMapRegionTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMapReference.to_json( - request.url_map_reference_resource, + compute.UrlMapReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setUrlMap".format( - host=self._host, - project=request.project, - region=request.region, - target_https_proxy=request.target_https_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetUrlMapRegionTargetHttpsProxyRequest.to_json( + compute.SetUrlMapRegionTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetUrlMapRegionTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -509,5 +743,48 @@ def set_url_map( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionTargetHttpsProxyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetRegionTargetHttpsProxyRequest], compute.TargetHttpsProxy]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionTargetHttpsProxyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionTargetHttpsProxiesRequest], compute.TargetHttpsProxyList + ]: + return self._list + + @property + def set_ssl_certificates( + self, + ) -> Callable[ + [compute.SetSslCertificatesRegionTargetHttpsProxyRequest], compute.Operation + ]: + return self._set_ssl_certificates + + @property + def set_url_map( + self, + ) -> Callable[[compute.SetUrlMapRegionTargetHttpsProxyRequest], compute.Operation]: + return self._set_url_map + + def close(self): + self._session.close() + __all__ = ("RegionTargetHttpsProxiesRestTransport",) diff --git a/google/cloud/compute_v1/services/region_url_maps/client.py b/google/cloud/compute_v1/services/region_url_maps/client.py index 5d19110a5..547066927 100644 --- a/google/cloud/compute_v1/services/region_url_maps/client.py +++ b/google/cloud/compute_v1/services/region_url_maps/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
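With the handlers renamed to `_delete`, `_get`, and so on, the transport now exposes each RPC through a read-only property that returns the private bound method, so the client layer keeps seeing one plain callable per method. A toy illustration of the pattern (class and types are made up, not the real transport):

```python
# Toy sketch of the property-per-RPC pattern used by the REST transports above.
from typing import Callable, Dict

class SketchTransport:
    def _get(self, request: Dict[str, str]) -> Dict[str, str]:
        # A real handler would transcode the request and issue the HTTP call.
        return {"echo": request.get("name", "")}

    @property
    def get(self) -> Callable[[Dict[str, str]], Dict[str, str]]:
        # Callers still write transport.get(request); the property just hands
        # back the private bound method.
        return self._get

transport = SketchTransport()
print(transport.get({"name": "example"}))  # {'echo': 'example'}
```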
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.region_url_maps import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionUrlMapsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,23 +335,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRegionUrlMapRequest = None, + request: Union[compute.DeleteRegionUrlMapRequest, dict] = None, *, project: str = None, region: str = None, url_map: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified UrlMap resource. Args: - request (google.cloud.compute_v1.types.DeleteRegionUrlMapRequest): + request (Union[google.cloud.compute_v1.types.DeleteRegionUrlMapRequest, dict]): The request object. A request message for RegionUrlMaps.Delete. See the method description for details. @@ -425,12 +437,12 @@ def delete( def get( self, - request: compute.GetRegionUrlMapRequest = None, + request: Union[compute.GetRegionUrlMapRequest, dict] = None, *, project: str = None, region: str = None, url_map: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMap: @@ -438,7 +450,7 @@ def get( available URL maps by making a list() request. Args: - request (google.cloud.compute_v1.types.GetRegionUrlMapRequest): + request (Union[google.cloud.compute_v1.types.GetRegionUrlMapRequest, dict]): The request object. 
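The `distutils.util.strtobool` call is gone (distutils is deprecated), and the mutual-TLS toggle now accepts only the literal strings "true" and "false". A standalone sketch of the new behavior, factored into a hypothetical helper:

```python
# Sketch of the stricter GOOGLE_API_USE_CLIENT_CERTIFICATE handling above;
# the helper function is illustrative, not part of the library.
import os

def _use_client_cert() -> bool:
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return value == "true"

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
assert _use_client_cert() is True

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "1"  # strtobool accepted this; now rejected
try:
    _use_client_cert()
except ValueError as exc:
    print(exc)
```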
A request message for RegionUrlMaps.Get. See the method description for details. @@ -529,12 +541,12 @@ def get( def insert( self, - request: compute.InsertRegionUrlMapRequest = None, + request: Union[compute.InsertRegionUrlMapRequest, dict] = None, *, project: str = None, region: str = None, url_map_resource: compute.UrlMap = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -542,7 +554,7 @@ def insert( using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertRegionUrlMapRequest): + request (Union[google.cloud.compute_v1.types.InsertRegionUrlMapRequest, dict]): The request object. A request message for RegionUrlMaps.Insert. See the method description for details. @@ -625,11 +637,11 @@ def insert( def list( self, - request: compute.ListRegionUrlMapsRequest = None, + request: Union[compute.ListRegionUrlMapsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -637,7 +649,7 @@ def list( the specified project in the specified region. Args: - request (google.cloud.compute_v1.types.ListRegionUrlMapsRequest): + request (Union[google.cloud.compute_v1.types.ListRegionUrlMapsRequest, dict]): The request object. A request message for RegionUrlMaps.List. See the method description for details. @@ -708,13 +720,13 @@ def list( def patch( self, - request: compute.PatchRegionUrlMapRequest = None, + request: Union[compute.PatchRegionUrlMapRequest, dict] = None, *, project: str = None, region: str = None, url_map: str = None, url_map_resource: compute.UrlMap = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -724,7 +736,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchRegionUrlMapRequest): + request (Union[google.cloud.compute_v1.types.PatchRegionUrlMapRequest, dict]): The request object. A request message for RegionUrlMaps.Patch. See the method description for details. @@ -814,13 +826,13 @@ def patch( def update( self, - request: compute.UpdateRegionUrlMapRequest = None, + request: Union[compute.UpdateRegionUrlMapRequest, dict] = None, *, project: str = None, region: str = None, url_map: str = None, url_map_resource: compute.UrlMap = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -828,7 +840,7 @@ def update( included in the request. Args: - request (google.cloud.compute_v1.types.UpdateRegionUrlMapRequest): + request (Union[google.cloud.compute_v1.types.UpdateRegionUrlMapRequest, dict]): The request object. A request message for RegionUrlMaps.Update. See the method description for details. 
@@ -920,13 +932,13 @@ def update( def validate( self, - request: compute.ValidateRegionUrlMapRequest = None, + request: Union[compute.ValidateRegionUrlMapRequest, dict] = None, *, project: str = None, region: str = None, url_map: str = None, region_url_maps_validate_request_resource: compute.RegionUrlMapsValidateRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMapsValidateResponse: @@ -935,7 +947,7 @@ def validate( this method does NOT create the UrlMap. Args: - request (google.cloud.compute_v1.types.ValidateRegionUrlMapRequest): + request (Union[google.cloud.compute_v1.types.ValidateRegionUrlMapRequest, dict]): The request object. A request message for RegionUrlMaps.Validate. See the method description for details. @@ -1014,6 +1026,19 @@ def validate( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/region_url_maps/pagers.py b/google/cloud/compute_v1/services/region_url_maps/pagers.py index c71549e26..5422a646f 100644 --- a/google/cloud/compute_v1/services/region_url_maps/pagers.py +++ b/google/cloud/compute_v1/services/region_url_maps/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.UrlMapList]: + def pages(self) -> Iterator[compute.UrlMapList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.UrlMap]: + def __iter__(self) -> Iterator[compute.UrlMap]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/region_url_maps/transports/base.py b/google/cloud/compute_v1/services/region_url_maps/transports/base.py index d6236858b..345ee52f0 100644 --- a/google/cloud/compute_v1/services/region_url_maps/transports/base.py +++ b/google/cloud/compute_v1/services/region_url_maps/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ 
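The new `__enter__`/`__exit__` pair makes every generated client usable as a context manager that closes its transport on exit, which is only safe when the transport is not shared. Hypothetical usage, assuming Application Default Credentials and placeholder project/region values:

```python
# Hypothetical usage of the new context-manager support on the generated clients.
from google.cloud import compute_v1

with compute_v1.RegionUrlMapsClient() as client:
    for url_map in client.list(project="my-project", region="us-central1"):
        print(url_map.name)
# Leaving the block calls client.transport.close(); the REST transport closes
# its underlying requests session.
```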
try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionUrlMapsTransport(abc.ABC): """Abstract transport class for RegionUrlMaps.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -181,6 +145,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/region_url_maps/transports/rest.py b/google/cloud/compute_v1/services/region_url_maps/transports/rest.py index 3d60692ac..f1bf4e1b5 100644 --- a/google/cloud/compute_v1/services/region_url_maps/transports/rest.py +++ b/google/cloud/compute_v1/services/region_url_maps/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + RegionUrlMapsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import RegionUrlMapsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionUrlMapsRestTransport(RegionUrlMapsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRegionUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. 
@@ -112,6 +139,9 @@ def delete( RegionUrlMaps.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,25 +165,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}".format( - host=self._host, - project=request.project, - region=request.region, - url_map=request.url_map, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.DeleteRegionUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRegionUrlMapRequest.to_json( + compute.DeleteRegionUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRegionUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +222,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRegionUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMap: r"""Call the get method over HTTP. @@ -177,6 +238,9 @@ def get( RegionUrlMaps.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -206,23 +270,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}".format( - host=self._host, - project=request.project, - region=request.region, - url_map=request.url_map, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.GetRegionUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionUrlMapRequest.to_json( + compute.GetRegionUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -232,10 +327,12 @@ def get( # Return the response return compute.UrlMap.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertRegionUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -246,6 +343,9 @@ def insert( RegionUrlMaps.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -269,30 +369,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/urlMaps", + "body": "url_map_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRegionUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMap.to_json( - request.url_map_resource, + compute.UrlMap(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/urlMaps".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRegionUrlMapRequest.to_json( + compute.InsertRegionUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRegionUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -303,10 +433,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionUrlMapsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMapList: r"""Call the list method over HTTP. @@ -317,6 +449,9 @@ def list( RegionUrlMaps.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -325,30 +460,53 @@ def list( Contains a list of UrlMap resources. 
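For body-carrying calls such as `_insert`, `_patch`, and `_update`, the body is now built from the transcoded `body` entry via the proto-plus `to_json` helper instead of being read straight off the request object. A sketch of that serialization step with a hypothetical resource:

```python
# Sketch of the request-body serialization used by the mutating handlers above;
# the UrlMap contents are hypothetical.
from google.cloud.compute_v1.types import compute

url_map_resource = compute.UrlMap(name="my-url-map")
body = compute.UrlMap.to_json(
    url_map_resource,
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
print(body)  # JSON string containing only the fields that were explicitly set
```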
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/urlMaps".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/urlMaps", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRegionUrlMapsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionUrlMapsRequest.to_json( + compute.ListRegionUrlMapsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionUrlMapsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionUrlMapsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionUrlMapsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionUrlMapsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionUrlMapsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -360,10 +518,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchRegionUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -374,6 +534,9 @@ def patch( RegionUrlMaps.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -397,33 +560,61 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}", + "body": "url_map_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.PatchRegionUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMap.to_json( - request.url_map_resource, + compute.UrlMap(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}".format( - host=self._host, - project=request.project, - region=request.region, - url_map=request.url_map, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRegionUrlMapRequest.to_json( + compute.PatchRegionUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRegionUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -434,10 +625,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateRegionUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -448,6 +641,9 @@ def update( RegionUrlMaps.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -471,33 +667,61 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}", + "body": "url_map_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.UpdateRegionUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMap.to_json( - request.url_map_resource, + compute.UrlMap(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}".format( - host=self._host, - project=request.project, - region=request.region, - url_map=request.url_map, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateRegionUrlMapRequest.to_json( + compute.UpdateRegionUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateRegionUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -508,10 +732,12 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def validate( + def _validate( self, request: compute.ValidateRegionUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMapsValidateResponse: r"""Call the validate method over HTTP. @@ -522,6 +748,9 @@ def validate( RegionUrlMaps.Validate. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -530,31 +759,61 @@ def validate( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}/validate", + "body": "region_url_maps_validate_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.ValidateRegionUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionUrlMapsValidateRequest.to_json( - request.region_url_maps_validate_request_resource, + compute.RegionUrlMapsValidateRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}/validate".format( - host=self._host, - project=request.project, - region=request.region, - url_map=request.url_map, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ValidateRegionUrlMapRequest.to_json( + compute.ValidateRegionUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -567,5 +826,46 @@ def validate( response.content, ignore_unknown_fields=True ) + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionUrlMapRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetRegionUrlMapRequest], compute.UrlMap]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionUrlMapRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListRegionUrlMapsRequest], compute.UrlMapList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchRegionUrlMapRequest], compute.Operation]: + return self._patch + + @property + def update( + self, + ) -> Callable[[compute.UpdateRegionUrlMapRequest], compute.Operation]: + return self._update + + @property + def validate( + self, + ) -> Callable[ + [compute.ValidateRegionUrlMapRequest], compute.UrlMapsValidateResponse + ]: + return self._validate + + 
def close(self): + self._session.close() + __all__ = ("RegionUrlMapsRestTransport",) diff --git a/google/cloud/compute_v1/services/regions/client.py b/google/cloud/compute_v1/services/regions/client.py index 113cd2c2c..7e7ce9797 100644 --- a/google/cloud/compute_v1/services/regions/client.py +++ b/google/cloud/compute_v1/services/regions/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.regions import pagers from google.cloud.compute_v1.types import compute from .transports.base import RegionsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,15 +335,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def get( self, - request: compute.GetRegionRequest = None, + request: Union[compute.GetRegionRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Region: @@ -340,7 +352,7 @@ def get( available regions by making a list() request. Args: - request (google.cloud.compute_v1.types.GetRegionRequest): + request (Union[google.cloud.compute_v1.types.GetRegionRequest, dict]): The request object. A request message for Regions.Get. See the method description for details. 
project (str): @@ -404,10 +416,10 @@ def get( def list( self, - request: compute.ListRegionsRequest = None, + request: Union[compute.ListRegionsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -415,7 +427,7 @@ def list( the specified project. Args: - request (google.cloud.compute_v1.types.ListRegionsRequest): + request (Union[google.cloud.compute_v1.types.ListRegionsRequest, dict]): The request object. A request message for Regions.List. See the method description for details. project (str): @@ -474,6 +486,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/regions/pagers.py b/google/cloud/compute_v1/services/regions/pagers.py index 1af84b165..45fa32226 100644 --- a/google/cloud/compute_v1/services/regions/pagers.py +++ b/google/cloud/compute_v1/services/regions/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RegionList]: + def pages(self) -> Iterator[compute.RegionList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Region]: + def __iter__(self) -> Iterator[compute.Region]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/regions/transports/base.py b/google/cloud/compute_v1/services/regions/transports/base.py index c02266c47..4b41a657b 100644 --- a/google/cloud/compute_v1/services/regions/transports/base.py +++ b/google/cloud/compute_v1/services/regions/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RegionsTransport(abc.ABC): """Abstract transport class for Regions.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -167,6 +131,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def get( self, diff --git a/google/cloud/compute_v1/services/regions/transports/rest.py b/google/cloud/compute_v1/services/regions/transports/rest.py index e7f55df23..f9c60e2b4 100644 --- a/google/cloud/compute_v1/services/regions/transports/rest.py +++ b/google/cloud/compute_v1/services/regions/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import RegionsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import RegionsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RegionsRestTransport(RegionsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def get( + def _get( self, request: compute.GetRegionRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Region: r"""Call the get method over HTTP. @@ -111,6 +135,9 @@ def get( The request object. A request message for Regions.Get. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -123,20 +150,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.GetRegionRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRegionRequest.to_json( + compute.GetRegionRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -146,10 +206,12 @@ def get( # Return the response return compute.Region.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRegionsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RegionList: r"""Call the list method over HTTP. @@ -159,6 +221,9 @@ def list( The request object. A request message for Regions.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -167,30 +232,49 @@ def list( Contains a list of region resources. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/regions",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListRegionsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRegionsRequest.to_json( + compute.ListRegionsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRegionsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRegionsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRegionsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRegionsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRegionsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -202,5 +286,16 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def get(self) -> Callable[[compute.GetRegionRequest], compute.Region]: + return self._get + + @property + def list(self) -> Callable[[compute.ListRegionsRequest], compute.RegionList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("RegionsRestTransport",) diff --git a/google/cloud/compute_v1/services/reservations/client.py b/google/cloud/compute_v1/services/reservations/client.py index 323b647dd..bda1bebbd 100644 --- a/google/cloud/compute_v1/services/reservations/client.py +++ b/google/cloud/compute_v1/services/reservations/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.reservations import pagers from google.cloud.compute_v1.types import compute from .transports.base import ReservationsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListReservationsRequest = None, + request: Union[compute.AggregatedListReservationsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of reservations. Args: - request (google.cloud.compute_v1.types.AggregatedListReservationsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListReservationsRequest, dict]): The request object. A request message for Reservations.AggregatedList. See the method description for details. @@ -400,19 +412,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteReservationRequest = None, + request: Union[compute.DeleteReservationRequest, dict] = None, *, project: str = None, zone: str = None, reservation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified reservation. Args: - request (google.cloud.compute_v1.types.DeleteReservationRequest): + request (Union[google.cloud.compute_v1.types.DeleteReservationRequest, dict]): The request object. 
A request message for Reservations.Delete. See the method description for details. @@ -493,12 +505,12 @@ def delete( def get( self, - request: compute.GetReservationRequest = None, + request: Union[compute.GetReservationRequest, dict] = None, *, project: str = None, zone: str = None, reservation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Reservation: @@ -506,7 +518,7 @@ def get( reservation. Args: - request (google.cloud.compute_v1.types.GetReservationRequest): + request (Union[google.cloud.compute_v1.types.GetReservationRequest, dict]): The request object. A request message for Reservations.Get. See the method description for details. @@ -578,12 +590,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyReservationRequest = None, + request: Union[compute.GetIamPolicyReservationRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -591,7 +603,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyReservationRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyReservationRequest, dict]): The request object. A request message for Reservations.GetIamPolicy. See the method description for details. @@ -700,12 +712,12 @@ def get_iam_policy( def insert( self, - request: compute.InsertReservationRequest = None, + request: Union[compute.InsertReservationRequest, dict] = None, *, project: str = None, zone: str = None, reservation_resource: compute.Reservation = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -713,7 +725,7 @@ def insert( Reserving zonal resources. Args: - request (google.cloud.compute_v1.types.InsertReservationRequest): + request (Union[google.cloud.compute_v1.types.InsertReservationRequest, dict]): The request object. A request message for Reservations.Insert. See the method description for details. @@ -794,11 +806,11 @@ def insert( def list( self, - request: compute.ListReservationsRequest = None, + request: Union[compute.ListReservationsRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -806,7 +818,7 @@ def list( configured for the specified project in specified zone. Args: - request (google.cloud.compute_v1.types.ListReservationsRequest): + request (Union[google.cloud.compute_v1.types.ListReservationsRequest, dict]): The request object. A request message for Reservations.List. See the method description for details. 
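A brief usage sketch of the Union[..., dict] request signatures introduced above (illustrative only; the project and zone values are placeholders):

from google.cloud import compute_v1

client = compute_v1.ReservationsClient()

# Both calls are equivalent: a plain dict is coerced into the typed request.
client.list(request={"project": "my-project", "zone": "us-central1-a"})
client.list(
    request=compute_v1.ListReservationsRequest(
        project="my-project", zone="us-central1-a"
    )
)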
@@ -874,13 +886,13 @@ def list( def resize( self, - request: compute.ResizeReservationRequest = None, + request: Union[compute.ResizeReservationRequest, dict] = None, *, project: str = None, zone: str = None, reservation: str = None, reservations_resize_request_resource: compute.ReservationsResizeRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -889,7 +901,7 @@ def resize( reservations. Args: - request (google.cloud.compute_v1.types.ResizeReservationRequest): + request (Union[google.cloud.compute_v1.types.ResizeReservationRequest, dict]): The request object. A request message for Reservations.Resize. See the method description for details. @@ -981,13 +993,13 @@ def resize( def set_iam_policy( self, - request: compute.SetIamPolicyReservationRequest = None, + request: Union[compute.SetIamPolicyReservationRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, zone_set_policy_request_resource: compute.ZoneSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -995,7 +1007,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyReservationRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyReservationRequest, dict]): The request object. A request message for Reservations.SetIamPolicy. See the method description for details. @@ -1115,13 +1127,13 @@ def set_iam_policy( def test_iam_permissions( self, - request: compute.TestIamPermissionsReservationRequest = None, + request: Union[compute.TestIamPermissionsReservationRequest, dict] = None, *, project: str = None, zone: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1129,7 +1141,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsReservationRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsReservationRequest, dict]): The request object. A request message for Reservations.TestIamPermissions. See the method description for details. @@ -1208,6 +1220,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/reservations/pagers.py b/google/cloud/compute_v1/services/reservations/pagers.py index a8f5361a3..f2aeefe8f 100644 --- a/google/cloud/compute_v1/services/reservations/pagers.py +++ b/google/cloud/compute_v1/services/reservations/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ReservationAggregatedList]: + def pages(self) -> Iterator[compute.ReservationAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.ReservationsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.ReservationsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ReservationList]: + def pages(self) -> Iterator[compute.ReservationList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Reservation]: + def __iter__(self) -> Iterator[compute.Reservation]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/reservations/transports/base.py b/google/cloud/compute_v1/services/reservations/transports/base.py index 83b8f4136..fa8814e62 100644 --- a/google/cloud/compute_v1/services/reservations/transports/base.py +++ b/google/cloud/compute_v1/services/reservations/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - 
_GOOGLE_AUTH_VERSION = None - class ReservationsTransport(abc.ABC): """Abstract transport class for Reservations.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -189,6 +153,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/reservations/transports/rest.py b/google/cloud/compute_v1/services/reservations/transports/rest.py index 9cc7790bd..16ea490a8 100644 --- a/google/cloud/compute_v1/services/reservations/transports/rest.py +++ b/google/cloud/compute_v1/services/reservations/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ReservationsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import ReservationsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ReservationsRestTransport(ReservationsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListReservationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ReservationAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( Reservations.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,32 +147,54 @@ def aggregated_list( Contains a list of reservations. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/reservations".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/reservations", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListReservationsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListReservationsRequest.to_json( + compute.AggregatedListReservationsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListReservationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListReservationsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListReservationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListReservationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListReservationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListReservationsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteReservationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -171,6 +222,9 @@ def delete( Reservations.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -194,25 +248,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}".format( - host=self._host, - project=request.project, - zone=request.zone, - reservation=request.reservation, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("reservation", "reservation"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteReservationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteReservationRequest.to_json( + compute.DeleteReservationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteReservationRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +305,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetReservationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Reservation: r"""Call the get method over HTTP. @@ -236,6 +321,9 @@ def get( Reservations.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -250,23 +338,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}".format( - host=self._host, - project=request.project, - zone=request.zone, - reservation=request.reservation, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("reservation", "reservation"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetReservationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetReservationRequest.to_json( + compute.GetReservationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -278,10 +397,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyReservationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -292,6 +413,9 @@ def get_iam_policy( Reservations.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
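The required_fields loop above compensates for proto-plus to_json dropping fields that sit at their default value; a minimal illustration (assumed proto-plus behavior, placeholder values):

from google.cloud.compute_v1.types import compute

req = compute.ListReservationsRequest(project="my-project", zone="", page_token="")
compute.ListReservationsRequest.to_json(req, including_default_value_fields=False)
# -> roughly '{"project": "my-project"}'; zone and page_token are at the proto
#    default ("") and are omitted, which is why any required field still present
#    in the transcoded request is copied back into query_params.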
@@ -340,30 +464,56 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetIamPolicyReservationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyReservationRequest.to_json( + compute.GetIamPolicyReservationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyReservationRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -373,10 +523,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertReservationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -387,6 +539,9 @@ def insert( Reservations.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -410,30 +565,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations", + "body": "reservation_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertReservationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Reservation.to_json( - request.reservation_resource, + compute.Reservation(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertReservationRequest.to_json( + compute.InsertReservationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertReservationRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -444,10 +629,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListReservationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ReservationList: r"""Call the list method over HTTP. @@ -458,6 +645,9 @@ def list( Reservations.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
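The POST body above is produced with proto-plus to_json as well; a minimal sketch of the serialization (the reservation name is a placeholder):

from google.cloud.compute_v1.types import compute

body = compute.Reservation.to_json(
    compute.Reservation(name="my-reservation"),
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
# body is a JSON string, roughly '{"name": "my-reservation"}', sent as the
# request payload via the data= argument.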
@@ -466,30 +656,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListReservationsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListReservationsRequest.to_json( + compute.ListReservationsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListReservationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListReservationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListReservationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListReservationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListReservationsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -501,10 +714,12 @@ def list( response.content, ignore_unknown_fields=True ) - def resize( + def _resize( self, request: compute.ResizeReservationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the resize method over HTTP. @@ -515,6 +730,9 @@ def resize( Reservations.Resize. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
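rest_helpers.flatten_query_params, used when sending each request above, turns the query-param dict into the (key, value) pairs that requests expects; a sketch of its assumed behavior:

from google.api_core import rest_helpers

rest_helpers.flatten_query_params(
    {"filter": "name eq us-central1", "pageToken": "token123"}
)
# -> [("filter", "name eq us-central1"), ("pageToken", "token123")]
# Nested dicts are flattened to dotted keys; list values become repeated keys.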
@@ -538,33 +756,61 @@ def resize( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}/resize", + "body": "reservations_resize_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("reservation", "reservation"), + ("zone", "zone"), + ] + + request_kwargs = compute.ResizeReservationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ReservationsResizeRequest.to_json( - request.reservations_resize_request_resource, + compute.ReservationsResizeRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}/resize".format( - host=self._host, - project=request.project, - zone=request.zone, - reservation=request.reservation, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ResizeReservationRequest.to_json( + compute.ResizeReservationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ResizeReservationRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -575,10 +821,12 @@ def resize( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyReservationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -589,6 +837,9 @@ def set_iam_policy( Reservations.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -637,31 +888,63 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/setIamPolicy", + "body": "zone_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.SetIamPolicyReservationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ZoneSetPolicyRequest.to_json( - request.zone_set_policy_request_resource, + compute.ZoneSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyReservationRequest.to_json( + compute.SetIamPolicyReservationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -672,10 +955,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsReservationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -686,6 +971,9 @@ def test_iam_permissions( Reservations.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
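Together with the __enter__/__exit__ methods and the transport close() added in this change, clients can now be used as context managers; a hedged usage sketch (project and zone are placeholders):

from google.cloud import compute_v1

with compute_v1.ReservationsClient() as client:
    for reservation in client.list(project="my-project", zone="us-central1-a"):
        print(reservation.name)
# Leaving the block calls client.transport.close(), so only do this when the
# transport is not shared with other clients.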
@@ -694,31 +982,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ("zone", "zone"), + ] + + request_kwargs = compute.TestIamPermissionsReservationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - zone=request.zone, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsReservationRequest.to_json( + compute.TestIamPermissionsReservationRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
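# --- Illustrative sketch, not part of this diff ---
# Two supporting details of the flow above. First, the `required_fields`
# back-fill exists because to_json() drops fields that still hold their proto
# default value, so required path/query fields are copied back from the
# transcoded request. Second, the assembled query params are flattened with
# google.api_core.rest_helpers.flatten_query_params before being handed to
# requests. The sample dict is made up; nested keys are assumed to come out
# as dotted names per the helper's documented behaviour.
from google.api_core import rest_helpers

query_params = {
    "requestId": "abc-123",
    "project": "my-project",
    "options": {"requestedPolicyVersion": "3"},
}

flattened = rest_helpers.flatten_query_params(query_params)
# flattened is a flat list of (key, value) pairs suitable for the `params=`
# argument of requests, e.g.
# [("requestId", "abc-123"), ("project", "my-project"), ("options.requestedPolicyVersion", "3")]
print(flattened)
# --- end sketch ---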
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -731,5 +1051,58 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListReservationsRequest], compute.ReservationAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteReservationRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetReservationRequest], compute.Reservation]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyReservationRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert(self) -> Callable[[compute.InsertReservationRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListReservationsRequest], compute.ReservationList]: + return self._list + + @property + def resize(self) -> Callable[[compute.ResizeReservationRequest], compute.Operation]: + return self._resize + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyReservationRequest], compute.Policy]: + return self._set_iam_policy + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsReservationRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("ReservationsRestTransport",) diff --git a/google/cloud/compute_v1/services/resource_policies/client.py b/google/cloud/compute_v1/services/resource_policies/client.py index a21ede291..97a8e6db3 100644 --- a/google/cloud/compute_v1/services/resource_policies/client.py +++ b/google/cloud/compute_v1/services/resource_policies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.resource_policies import pagers from google.cloud.compute_v1.types import compute from .transports.base import ResourcePoliciesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,21 +337,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListResourcePoliciesRequest = None, + request: Union[compute.AggregatedListResourcePoliciesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of resource policies. Args: - request (google.cloud.compute_v1.types.AggregatedListResourcePoliciesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListResourcePoliciesRequest, dict]): The request object. A request message for ResourcePolicies.AggregatedList. See the method description for details. @@ -402,19 +414,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteResourcePolicyRequest = None, + request: Union[compute.DeleteResourcePolicyRequest, dict] = None, *, project: str = None, region: str = None, resource_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified resource policy. 
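# --- Illustrative sketch, not part of this diff ---
# The client __init__ above replaces distutils.util.strtobool with an explicit
# check, so only the literal strings "true" and "false" are accepted for
# GOOGLE_API_USE_CLIENT_CERTIFICATE. Values that strtobool used to coerce
# (e.g. "yes", "1") now raise ValueError at construction time. The client name
# is just one of the clients touched by this diff; no credentials are needed
# because the check runs before credential resolution.
import os
from google.cloud import compute_v1

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "yes"
try:
    compute_v1.ResourcePoliciesClient()
except ValueError as exc:
    print(exc)  # ...must be either `true` or `false`
finally:
    os.environ.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None)
# --- end sketch ---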
Args: - request (google.cloud.compute_v1.types.DeleteResourcePolicyRequest): + request (Union[google.cloud.compute_v1.types.DeleteResourcePolicyRequest, dict]): The request object. A request message for ResourcePolicies.Delete. See the method description for details. @@ -497,12 +509,12 @@ def delete( def get( self, - request: compute.GetResourcePolicyRequest = None, + request: Union[compute.GetResourcePolicyRequest, dict] = None, *, project: str = None, region: str = None, resource_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ResourcePolicy: @@ -510,7 +522,7 @@ def get( policy. Args: - request (google.cloud.compute_v1.types.GetResourcePolicyRequest): + request (Union[google.cloud.compute_v1.types.GetResourcePolicyRequest, dict]): The request object. A request message for ResourcePolicies.Get. See the method description for details. @@ -584,12 +596,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyResourcePolicyRequest = None, + request: Union[compute.GetIamPolicyResourcePolicyRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -597,7 +609,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyResourcePolicyRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyResourcePolicyRequest, dict]): The request object. A request message for ResourcePolicies.GetIamPolicy. See the method description for details. @@ -706,19 +718,19 @@ def get_iam_policy( def insert( self, - request: compute.InsertResourcePolicyRequest = None, + request: Union[compute.InsertResourcePolicyRequest, dict] = None, *, project: str = None, region: str = None, resource_policy_resource: compute.ResourcePolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Creates a new resource policy. Args: - request (google.cloud.compute_v1.types.InsertResourcePolicyRequest): + request (Union[google.cloud.compute_v1.types.InsertResourcePolicyRequest, dict]): The request object. A request message for ResourcePolicies.Insert. See the method description for details. @@ -799,11 +811,11 @@ def insert( def list( self, - request: compute.ListResourcePoliciesRequest = None, + request: Union[compute.ListResourcePoliciesRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -812,7 +824,7 @@ def list( region. Args: - request (google.cloud.compute_v1.types.ListResourcePoliciesRequest): + request (Union[google.cloud.compute_v1.types.ListResourcePoliciesRequest, dict]): The request object. A request message for ResourcePolicies.List. See the method description for details. 
@@ -880,13 +892,13 @@ def list( def set_iam_policy( self, - request: compute.SetIamPolicyResourcePolicyRequest = None, + request: Union[compute.SetIamPolicyResourcePolicyRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_policy_request_resource: compute.RegionSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -894,7 +906,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyResourcePolicyRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyResourcePolicyRequest, dict]): The request object. A request message for ResourcePolicies.SetIamPolicy. See the method description for details. @@ -1014,13 +1026,13 @@ def set_iam_policy( def test_iam_permissions( self, - request: compute.TestIamPermissionsResourcePolicyRequest = None, + request: Union[compute.TestIamPermissionsResourcePolicyRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1028,7 +1040,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsResourcePolicyRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsResourcePolicyRequest, dict]): The request object. A request message for ResourcePolicies.TestIamPermissions. See the method description for details. @@ -1107,6 +1119,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/resource_policies/pagers.py b/google/cloud/compute_v1/services/resource_policies/pagers.py index 7cc063099..19b829a91 100644 --- a/google/cloud/compute_v1/services/resource_policies/pagers.py +++ b/google/cloud/compute_v1/services/resource_policies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ResourcePolicyAggregatedList]: + def pages(self) -> Iterator[compute.ResourcePolicyAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.ResourcePoliciesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.ResourcePoliciesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ResourcePolicyList]: + def pages(self) -> Iterator[compute.ResourcePolicyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ResourcePolicy]: + def __iter__(self) -> Iterator[compute.ResourcePolicy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/resource_policies/transports/base.py b/google/cloud/compute_v1/services/resource_policies/transports/base.py index b4846ad70..ca462429b 100644 --- a/google/cloud/compute_v1/services/resource_policies/transports/base.py +++ b/google/cloud/compute_v1/services/resource_policies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except 
pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ResourcePoliciesTransport(abc.ABC): """Abstract transport class for ResourcePolicies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -186,6 +150,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/resource_policies/transports/rest.py b/google/cloud/compute_v1/services/resource_policies/transports/rest.py index afa2fd668..49c61804a 100644 --- a/google/cloud/compute_v1/services/resource_policies/transports/rest.py +++ b/google/cloud/compute_v1/services/resource_policies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
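# --- Illustrative sketch, not part of this diff ---
# The OptionalRetry alias added at the top of these modules is a typing aid:
# on google-api-core releases that expose gapic_v1.method._MethodDefault it is
# Union[Retry, _MethodDefault], and on older releases it degrades to
# Union[Retry, object]. From a caller's point of view the new `retry` and
# `timeout` parameters are used like this; the project/region values are
# placeholders and the call assumes working application default credentials.
from google.api_core import retry as retries
from google.cloud import compute_v1

client = compute_v1.ResourcePoliciesClient()
custom_retry = retries.Retry(initial=0.1, maximum=5.0, multiplier=2.0)

# Omitting `retry` keeps the gapic_v1.method.DEFAULT sentinel (the retry
# configured when the method was wrapped); passing a Retry overrides it.
pager = client.list(
    project="my-project",
    region="us-central1",
    retry=custom_retry,
    timeout=30.0,
)
# --- end sketch ---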
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + ResourcePoliciesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import ResourcePoliciesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ResourcePoliciesRestTransport(ResourcePoliciesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListResourcePoliciesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ResourcePolicyAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( ResourcePolicies.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,35 +150,54 @@ def aggregated_list( Contains a list of resourcePolicies. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/resourcePolicies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/resourcePolicies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListResourcePoliciesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListResourcePoliciesRequest.to_json( + compute.AggregatedListResourcePoliciesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListResourcePoliciesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListResourcePoliciesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListResourcePoliciesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListResourcePoliciesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListResourcePoliciesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListResourcePoliciesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteResourcePolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( ResourcePolicies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,25 +251,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}".format( - host=self._host, - project=request.project, - region=request.region, - resource_policy=request.resource_policy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource_policy", "resourcePolicy"), + ] + + request_kwargs = compute.DeleteResourcePolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteResourcePolicyRequest.to_json( + compute.DeleteResourcePolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteResourcePolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +308,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetResourcePolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ResourcePolicy: r"""Call the get method over HTTP. @@ -239,6 +324,9 @@ def get( ResourcePolicies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -253,23 +341,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}".format( - host=self._host, - project=request.project, - region=request.region, - resource_policy=request.resource_policy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource_policy", "resourcePolicy"), + ] + + request_kwargs = compute.GetResourcePolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetResourcePolicyRequest.to_json( + compute.GetResourcePolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -281,10 +400,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyResourcePolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -295,6 +416,9 @@ def get_iam_policy( ResourcePolicies.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -343,30 +467,56 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyResourcePolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyResourcePolicyRequest.to_json( + compute.GetIamPolicyResourcePolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyResourcePolicyRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -376,10 +526,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertResourcePolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -390,6 +542,9 @@ def insert( ResourcePolicies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -413,30 +568,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/resourcePolicies", + "body": "resource_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertResourcePolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ResourcePolicy.to_json( - request.resource_policy_resource, + compute.ResourcePolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/resourcePolicies".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertResourcePolicyRequest.to_json( + compute.InsertResourcePolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertResourcePolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -447,10 +632,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListResourcePoliciesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ResourcePolicyList: r"""Call the list method over HTTP. @@ -461,6 +648,9 @@ def list( ResourcePolicies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -469,30 +659,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/resourcePolicies".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/resourcePolicies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListResourcePoliciesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListResourcePoliciesRequest.to_json( + compute.ListResourcePoliciesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListResourcePoliciesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListResourcePoliciesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListResourcePoliciesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListResourcePoliciesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListResourcePoliciesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -504,10 +717,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyResourcePolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -518,6 +733,9 @@ def set_iam_policy( ResourcePolicies.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -566,31 +784,63 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyResourcePolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetPolicyRequest.to_json( - request.region_set_policy_request_resource, + compute.RegionSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyResourcePolicyRequest.to_json( + compute.SetIamPolicyResourcePolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -601,10 +851,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsResourcePolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -615,6 +867,9 @@ def test_iam_permissions( ResourcePolicies.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -623,31 +878,65 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsResourcePolicyRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsResourcePolicyRequest.to_json( + compute.TestIamPermissionsResourcePolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -660,5 +949,62 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListResourcePoliciesRequest], + compute.ResourcePolicyAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteResourcePolicyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetResourcePolicyRequest], compute.ResourcePolicy]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyResourcePolicyRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert( + self, + ) -> Callable[[compute.InsertResourcePolicyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListResourcePoliciesRequest], compute.ResourcePolicyList]: + return self._list + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyResourcePolicyRequest], compute.Policy]: + return self._set_iam_policy + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsResourcePolicyRequest], + compute.TestPermissionsResponse, + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("ResourcePoliciesRestTransport",) diff --git a/google/cloud/compute_v1/services/routers/client.py b/google/cloud/compute_v1/services/routers/client.py index d1f5bd449..52f72fc7a 100644 --- a/google/cloud/compute_v1/services/routers/client.py +++ b/google/cloud/compute_v1/services/routers/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
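# --- Illustrative sketch, not part of this diff ---
# Each public RPC on the REST transport is now a property that returns the
# private `_method` implementation, so existing callers still get a callable
# via `transport.list`, while the generated layer keeps the indirection it
# needs. AnonymousCredentials are used here only so the transport can be
# constructed without real credentials; no request is sent.
from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.resource_policies.transports.rest import (
    ResourcePoliciesRestTransport,
)

transport = ResourcePoliciesRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
list_callable = transport.list  # property access yields the bound _list method
print(callable(list_callable))  # True
transport.close()  # releases the underlying requests session
# --- end sketch ---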
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.routers import pagers from google.cloud.compute_v1.types import compute from .transports.base import RoutersTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListRoutersRequest = None, + request: Union[compute.AggregatedListRoutersRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of routers. Args: - request (google.cloud.compute_v1.types.AggregatedListRoutersRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListRoutersRequest, dict]): The request object. A request message for Routers.AggregatedList. See the method description for details. @@ -400,19 +412,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteRouterRequest = None, + request: Union[compute.DeleteRouterRequest, dict] = None, *, project: str = None, region: str = None, router: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified Router resource. Args: - request (google.cloud.compute_v1.types.DeleteRouterRequest): + request (Union[google.cloud.compute_v1.types.DeleteRouterRequest, dict]): The request object. A request message for Routers.Delete. 
See the method description for details. project (str): @@ -494,12 +506,12 @@ def delete( def get( self, - request: compute.GetRouterRequest = None, + request: Union[compute.GetRouterRequest, dict] = None, *, project: str = None, region: str = None, router: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Router: @@ -507,7 +519,7 @@ def get( available routers by making a list() request. Args: - request (google.cloud.compute_v1.types.GetRouterRequest): + request (Union[google.cloud.compute_v1.types.GetRouterRequest, dict]): The request object. A request message for Routers.Get. See the method description for details. project (str): @@ -577,12 +589,12 @@ def get( def get_nat_mapping_info( self, - request: compute.GetNatMappingInfoRoutersRequest = None, + request: Union[compute.GetNatMappingInfoRoutersRequest, dict] = None, *, project: str = None, region: str = None, router: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.GetNatMappingInfoPager: @@ -590,7 +602,7 @@ def get_nat_mapping_info( endpoints. Args: - request (google.cloud.compute_v1.types.GetNatMappingInfoRoutersRequest): + request (Union[google.cloud.compute_v1.types.GetNatMappingInfoRoutersRequest, dict]): The request object. A request message for Routers.GetNatMappingInfo. See the method description for details. @@ -670,12 +682,12 @@ def get_nat_mapping_info( def get_router_status( self, - request: compute.GetRouterStatusRouterRequest = None, + request: Union[compute.GetRouterStatusRouterRequest, dict] = None, *, project: str = None, region: str = None, router: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RouterStatusResponse: @@ -683,7 +695,7 @@ def get_router_status( router. Args: - request (google.cloud.compute_v1.types.GetRouterStatusRouterRequest): + request (Union[google.cloud.compute_v1.types.GetRouterStatusRouterRequest, dict]): The request object. A request message for Routers.GetRouterStatus. See the method description for details. @@ -749,12 +761,12 @@ def get_router_status( def insert( self, - request: compute.InsertRouterRequest = None, + request: Union[compute.InsertRouterRequest, dict] = None, *, project: str = None, region: str = None, router_resource: compute.Router = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -762,7 +774,7 @@ def insert( and region using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertRouterRequest): + request (Union[google.cloud.compute_v1.types.InsertRouterRequest, dict]): The request object. A request message for Routers.Insert. See the method description for details. 
project (str): @@ -842,11 +854,11 @@ def insert( def list( self, - request: compute.ListRoutersRequest = None, + request: Union[compute.ListRoutersRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -854,7 +866,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListRoutersRequest): + request (Union[google.cloud.compute_v1.types.ListRoutersRequest, dict]): The request object. A request message for Routers.List. See the method description for details. project (str): @@ -922,13 +934,13 @@ def list( def patch( self, - request: compute.PatchRouterRequest = None, + request: Union[compute.PatchRouterRequest, dict] = None, *, project: str = None, region: str = None, router: str = None, router_resource: compute.Router = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -938,7 +950,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchRouterRequest): + request (Union[google.cloud.compute_v1.types.PatchRouterRequest, dict]): The request object. A request message for Routers.Patch. See the method description for details. project (str): @@ -1025,13 +1037,13 @@ def patch( def preview( self, - request: compute.PreviewRouterRequest = None, + request: Union[compute.PreviewRouterRequest, dict] = None, *, project: str = None, region: str = None, router: str = None, router_resource: compute.Router = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RoutersPreviewResponse: @@ -1040,7 +1052,7 @@ def preview( create or update the router. Args: - request (google.cloud.compute_v1.types.PreviewRouterRequest): + request (Union[google.cloud.compute_v1.types.PreviewRouterRequest, dict]): The request object. A request message for Routers.Preview. See the method description for details. project (str): @@ -1112,13 +1124,13 @@ def preview( def update( self, - request: compute.UpdateRouterRequest = None, + request: Union[compute.UpdateRouterRequest, dict] = None, *, project: str = None, region: str = None, router: str = None, router_resource: compute.Router = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1130,7 +1142,7 @@ def update( payload. Args: - request (google.cloud.compute_v1.types.UpdateRouterRequest): + request (Union[google.cloud.compute_v1.types.UpdateRouterRequest, dict]): The request object. A request message for Routers.Update. See the method description for details. project (str): @@ -1217,6 +1229,19 @@ def update( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/routers/pagers.py b/google/cloud/compute_v1/services/routers/pagers.py index fbfba1403..5766e3564 100644 --- a/google/cloud/compute_v1/services/routers/pagers.py +++ b/google/cloud/compute_v1/services/routers/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RouterAggregatedList]: + def pages(self) -> Iterator[compute.RouterAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.RoutersScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.RoutersScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.VmEndpointNatMappingsList]: + def pages(self) -> Iterator[compute.VmEndpointNatMappingsList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.VmEndpointNatMappings]: + def __iter__(self) -> Iterator[compute.VmEndpointNatMappings]: for page in self.pages: yield from page.result @@ -201,14 +201,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RouterList]: + def pages(self) -> Iterator[compute.RouterList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Router]: + def __iter__(self) -> Iterator[compute.Router]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/routers/transports/base.py b/google/cloud/compute_v1/services/routers/transports/base.py index 6d22db5e7..a67ad65ea 100644 --- a/google/cloud/compute_v1/services/routers/transports/base.py +++ b/google/cloud/compute_v1/services/routers/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( 
gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RoutersTransport(abc.ABC): """Abstract transport class for Routers.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -192,6 +156,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/routers/transports/rest.py b/google/cloud/compute_v1/services/routers/transports/rest.py index 5eddeeaa7..4d75a03d4 100644 --- a/google/cloud/compute_v1/services/routers/transports/rest.py +++ b/google/cloud/compute_v1/services/routers/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import RoutersTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import RoutersTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RoutersRestTransport(RoutersTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListRoutersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RouterAggregatedList: r"""Call the aggregated list method over HTTP. 
@@ -112,6 +136,9 @@ def aggregated_list( Routers.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,32 +147,54 @@ def aggregated_list( Contains a list of routers. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/routers".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/routers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListRoutersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListRoutersRequest.to_json( + compute.AggregatedListRoutersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListRoutersRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListRoutersRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListRoutersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListRoutersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListRoutersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListRoutersRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteRouterRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. 
@@ -170,6 +221,9 @@ def delete( The request object. A request message for Routers.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -193,25 +247,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers/{router}".format( - host=self._host, - project=request.project, - region=request.region, - router=request.router, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("router", "router"), + ] + + request_kwargs = compute.DeleteRouterRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRouterRequest.to_json( + compute.DeleteRouterRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRouterRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -221,10 +304,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRouterRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Router: r"""Call the get method over HTTP. @@ -234,6 +319,9 @@ def get( The request object. A request message for Routers.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -245,23 +333,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers/{router}".format( - host=self._host, - project=request.project, - region=request.region, - router=request.router, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("router", "router"), + ] + + request_kwargs = compute.GetRouterRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRouterRequest.to_json( + compute.GetRouterRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -271,10 +390,12 @@ def get( # Return the response return compute.Router.from_json(response.content, ignore_unknown_fields=True) - def get_nat_mapping_info( + def _get_nat_mapping_info( self, request: compute.GetNatMappingInfoRoutersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VmEndpointNatMappingsList: r"""Call the get nat mapping info method over HTTP. @@ -285,6 +406,9 @@ def get_nat_mapping_info( Routers.GetNatMappingInfo. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -295,33 +419,56 @@ def get_nat_mapping_info( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers/{router}/getNatMappingInfo".format( - host=self._host, - project=request.project, - region=request.region, - router=request.router, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}/getNatMappingInfo", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("router", "router"), + ] + + request_kwargs = compute.GetNatMappingInfoRoutersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetNatMappingInfoRoutersRequest.to_json( + compute.GetNatMappingInfoRoutersRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetNatMappingInfoRoutersRequest.filter in request: - query_params["filter"] = request.filter - if compute.GetNatMappingInfoRoutersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.GetNatMappingInfoRoutersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.GetNatMappingInfoRoutersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.GetNatMappingInfoRoutersRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -333,10 +480,12 @@ def get_nat_mapping_info( response.content, ignore_unknown_fields=True ) - def get_router_status( + def _get_router_status( self, request: compute.GetRouterStatusRouterRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RouterStatusResponse: r"""Call the get router status method over HTTP. @@ -347,6 +496,9 @@ def get_router_status( Routers.GetRouterStatus. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -355,23 +507,56 @@ def get_router_status( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers/{router}/getRouterStatus".format( - host=self._host, - project=request.project, - region=request.region, - router=request.router, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}/getRouterStatus", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("router", "router"), + ] + + request_kwargs = compute.GetRouterStatusRouterRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRouterStatusRouterRequest.to_json( + compute.GetRouterStatusRouterRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -383,10 +568,12 @@ def get_router_status( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertRouterRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -396,6 +583,9 @@ def insert( The request object. A request message for Routers.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
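Note (illustrative, not part of the diff): the REST hunks above and below replace hand-built URLs and per-field query-parameter handling with a shared transcoding flow built on `google.api_core.path_template.transcode`. Roughly, the request is converted to a dict, matched against the declared `http_options`, and split into an HTTP method, a URI, an optional body field, and leftover query parameters; required path fields are then backfilled so default values are not lost. A rough sketch under those assumptions (placeholder values):

from google.api_core import path_template

http_options = [
    {"method": "get", "uri": "/compute/v1/projects/{project}/regions/{region}/routers"},
]
request_kwargs = {"project": "my-project", "region": "us-central1", "page_token": "abc"}

transcoded = path_template.transcode(http_options, **request_kwargs)
# Expected shape (assumption, based on how the code above consumes the result):
#   transcoded["method"]       -> "get"
#   transcoded["uri"]          -> "/compute/v1/projects/my-project/regions/us-central1/routers"
#   transcoded["query_params"] -> {"page_token": "abc"}   # fields not bound by the URI template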
@@ -419,30 +609,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers", + "body": "router_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertRouterRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Router.to_json( - request.router_resource, + compute.Router(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRouterRequest.to_json( + compute.InsertRouterRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRouterRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -453,10 +673,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRoutersRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RouterList: r"""Call the list method over HTTP. @@ -466,6 +688,9 @@ def list( The request object. A request message for Routers.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -474,30 +699,53 @@ def list( Contains a list of Router resources. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListRoutersRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRoutersRequest.to_json( + compute.ListRoutersRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRoutersRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRoutersRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRoutersRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRoutersRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRoutersRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -509,10 +757,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchRouterRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -522,6 +772,9 @@ def patch( The request object. A request message for Routers.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -545,33 +798,61 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}", + "body": "router_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("router", "router"), + ] + + request_kwargs = compute.PatchRouterRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Router.to_json( - request.router_resource, + compute.Router(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers/{router}".format( - host=self._host, - project=request.project, - region=request.region, - router=request.router, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRouterRequest.to_json( + compute.PatchRouterRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRouterRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -582,10 +863,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def preview( + def _preview( self, request: compute.PreviewRouterRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RoutersPreviewResponse: r"""Call the preview method over HTTP. @@ -596,6 +879,9 @@ def preview( Routers.Preview. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -604,31 +890,61 @@ def preview( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}/preview", + "body": "router_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("router", "router"), + ] + + request_kwargs = compute.PreviewRouterRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Router.to_json( - request.router_resource, + compute.Router(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers/{router}/preview".format( - host=self._host, - project=request.project, - region=request.region, - router=request.router, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PreviewRouterRequest.to_json( + compute.PreviewRouterRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -641,10 +957,12 @@ def preview( response.content, ignore_unknown_fields=True ) - def update( + def _update( self, request: compute.UpdateRouterRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -654,6 +972,9 @@ def update( The request object. A request message for Routers.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -677,33 +998,61 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/regions/{region}/routers/{router}", + "body": "router_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("router", "router"), + ] + + request_kwargs = compute.UpdateRouterRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Router.to_json( - request.router_resource, + compute.Router(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/routers/{router}".format( - host=self._host, - project=request.project, - region=request.region, - router=request.router, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateRouterRequest.to_json( + compute.UpdateRouterRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateRouterRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -714,5 +1063,58 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[[compute.AggregatedListRoutersRequest], compute.RouterAggregatedList]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteRouterRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetRouterRequest], compute.Router]: + return self._get + + @property + def get_nat_mapping_info( + self, + ) -> Callable[ + [compute.GetNatMappingInfoRoutersRequest], compute.VmEndpointNatMappingsList + ]: + return self._get_nat_mapping_info + + @property + def get_router_status( + self, + ) -> Callable[[compute.GetRouterStatusRouterRequest], compute.RouterStatusResponse]: + return self._get_router_status + + @property + def insert(self) -> Callable[[compute.InsertRouterRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> 
Callable[[compute.ListRoutersRequest], compute.RouterList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchRouterRequest], compute.Operation]: + return self._patch + + @property + def preview( + self, + ) -> Callable[[compute.PreviewRouterRequest], compute.RoutersPreviewResponse]: + return self._preview + + @property + def update(self) -> Callable[[compute.UpdateRouterRequest], compute.Operation]: + return self._update + + def close(self): + self._session.close() + __all__ = ("RoutersRestTransport",) diff --git a/google/cloud/compute_v1/services/routes/client.py b/google/cloud/compute_v1/services/routes/client.py index 0913ad774..7ff47c494 100644 --- a/google/cloud/compute_v1/services/routes/client.py +++ b/google/cloud/compute_v1/services/routes/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.routes import pagers from google.cloud.compute_v1.types import compute from .transports.base import RoutesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,22 +335,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteRouteRequest = None, + request: Union[compute.DeleteRouteRequest, dict] = None, *, project: str = None, route: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified Route resource. 
Args: - request (google.cloud.compute_v1.types.DeleteRouteRequest): + request (Union[google.cloud.compute_v1.types.DeleteRouteRequest, dict]): The request object. A request message for Routes.Delete. See the method description for details. project (str): @@ -412,11 +424,11 @@ def delete( def get( self, - request: compute.GetRouteRequest = None, + request: Union[compute.GetRouteRequest, dict] = None, *, project: str = None, route: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Route: @@ -424,7 +436,7 @@ def get( available routes by making a list() request. Args: - request (google.cloud.compute_v1.types.GetRouteRequest): + request (Union[google.cloud.compute_v1.types.GetRouteRequest, dict]): The request object. A request message for Routes.Get. See the method description for details. project (str): @@ -488,11 +500,11 @@ def get( def insert( self, - request: compute.InsertRouteRequest = None, + request: Union[compute.InsertRouteRequest, dict] = None, *, project: str = None, route_resource: compute.Route = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -500,7 +512,7 @@ def insert( using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertRouteRequest): + request (Union[google.cloud.compute_v1.types.InsertRouteRequest, dict]): The request object. A request message for Routes.Insert. See the method description for details. project (str): @@ -573,10 +585,10 @@ def insert( def list( self, - request: compute.ListRoutesRequest = None, + request: Union[compute.ListRoutesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -584,7 +596,7 @@ def list( the specified project. Args: - request (google.cloud.compute_v1.types.ListRoutesRequest): + request (Union[google.cloud.compute_v1.types.ListRoutesRequest, dict]): The request object. A request message for Routes.List. See the method description for details. project (str): @@ -643,6 +655,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/routes/pagers.py b/google/cloud/compute_v1/services/routes/pagers.py index 4d998556a..9d4de6b65 100644 --- a/google/cloud/compute_v1/services/routes/pagers.py +++ b/google/cloud/compute_v1/services/routes/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.RouteList]: + def pages(self) -> Iterator[compute.RouteList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Route]: + def __iter__(self) -> Iterator[compute.Route]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/routes/transports/base.py b/google/cloud/compute_v1/services/routes/transports/base.py index 846a9396e..bda790f31 100644 --- a/google/cloud/compute_v1/services/routes/transports/base.py +++ b/google/cloud/compute_v1/services/routes/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class RoutesTransport(abc.ABC): """Abstract transport class for Routes.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/routes/transports/rest.py b/google/cloud/compute_v1/services/routes/transports/rest.py index 6e61a3d99..52963a269 100644 --- a/google/cloud/compute_v1/services/routes/transports/rest.py +++ b/google/cloud/compute_v1/services/routes/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import RoutesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import RoutesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class RoutesRestTransport(RoutesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteRouteRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -111,6 +135,9 @@ def delete( The request object. A request message for Routes.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -134,22 +161,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/routes/{route}".format( - host=self._host, project=request.project, route=request.route, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/routes/{route}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("route", "route"), + ] + + request_kwargs = compute.DeleteRouteRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteRouteRequest.to_json( + compute.DeleteRouteRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteRouteRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -159,10 +217,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetRouteRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Route: r"""Call the get method over HTTP. @@ -172,6 +232,9 @@ def get( The request object. A request message for Routes.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
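Note — a minimal, hedged sketch of how the rewritten REST methods use google.api_core.path_template.transcode to build the URL from the declared http_options; the field values below are hypothetical, and the exact shape of the returned dict is only approximated in the comments:

from google.api_core import path_template

# One HTTP rule, mirroring the delete option declared above.
http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/projects/{project}/global/routes/{route}",
    },
]
# In the generated code these kwargs come from DeleteRouteRequest.to_dict(request).
request_kwargs = {"project": "example-project", "route": "example-route", "request_id": "abc123"}

transcoded = path_template.transcode(http_options, **request_kwargs)
# Roughly:
#   transcoded["method"]       == "delete"
#   transcoded["uri"]          == "/compute/v1/projects/example-project/global/routes/example-route"
#   transcoded["query_params"] == {"request_id": "abc123"}  # fields not bound in the URI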
@@ -186,20 +249,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/routes/{route}".format( - host=self._host, project=request.project, route=request.route, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/routes/{route}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("route", "route"), + ] + + request_kwargs = compute.GetRouteRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRouteRequest.to_json( + compute.GetRouteRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -209,10 +305,12 @@ def get( # Return the response return compute.Route.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertRouteRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -222,6 +320,9 @@ def insert( The request object. A request message for Routes.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
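Note — before the request is sent, the JSON-ified query params are passed through google.api_core.rest_helpers.flatten_query_params so the underlying requests session receives a flat key/value sequence. A hedged sketch; the values are hypothetical and the exact return shape may differ between google-api-core versions:

from google.api_core import rest_helpers

query_params = {"maxResults": 10, "returnPartialSuccess": True}
flat = rest_helpers.flatten_query_params(query_params)
# Roughly: [("maxResults", 10), ("returnPartialSuccess", True)]
# Nested dicts are flattened into dotted keys, e.g. {"a": {"b": 1}} -> [("a.b", 1)].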
@@ -245,30 +346,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/routes", + "body": "route_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertRouteRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Route.to_json( - request.route_resource, + compute.Route(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/routes".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertRouteRequest.to_json( + compute.InsertRouteRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertRouteRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -279,10 +409,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListRoutesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.RouteList: r"""Call the list method over HTTP. @@ -292,6 +424,9 @@ def list( The request object. A request message for Routes.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -300,30 +435,49 @@ def list( Contains a list of Route resources. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/routes".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/global/routes",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListRoutesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListRoutesRequest.to_json( + compute.ListRoutesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListRoutesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListRoutesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListRoutesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListRoutesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListRoutesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -333,5 +487,24 @@ def list( # Return the response return compute.RouteList.from_json(response.content, ignore_unknown_fields=True) + @property + def delete(self) -> Callable[[compute.DeleteRouteRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetRouteRequest], compute.Route]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertRouteRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListRoutesRequest], compute.RouteList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("RoutesRestTransport",) diff --git a/google/cloud/compute_v1/services/security_policies/client.py b/google/cloud/compute_v1/services/security_policies/client.py index ea91016ef..ab4d7df85 100644 --- a/google/cloud/compute_v1/services/security_policies/client.py +++ b/google/cloud/compute_v1/services/security_policies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.security_policies import pagers from google.cloud.compute_v1.types import compute from .transports.base import SecurityPoliciesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,23 +337,24 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_rule( self, - request: compute.AddRuleSecurityPolicyRequest = None, + request: Union[compute.AddRuleSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy: str = None, security_policy_rule_resource: compute.SecurityPolicyRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Inserts a rule into a security policy. Args: - request (google.cloud.compute_v1.types.AddRuleSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.AddRuleSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.AddRule. See the method description for details. @@ -427,18 +439,18 @@ def add_rule( def delete( self, - request: compute.DeleteSecurityPolicyRequest = None, + request: Union[compute.DeleteSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified policy. 
Args: - request (google.cloud.compute_v1.types.DeleteSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.DeleteSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.Delete. See the method description for details. @@ -514,11 +526,11 @@ def delete( def get( self, - request: compute.GetSecurityPolicyRequest = None, + request: Union[compute.GetSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SecurityPolicy: @@ -526,7 +538,7 @@ def get( specified policy. Args: - request (google.cloud.compute_v1.types.GetSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.GetSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.Get. See the method description for details. @@ -591,18 +603,18 @@ def get( def get_rule( self, - request: compute.GetRuleSecurityPolicyRequest = None, + request: Union[compute.GetRuleSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SecurityPolicyRule: r"""Gets a rule at the specified priority. Args: - request (google.cloud.compute_v1.types.GetRuleSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.GetRuleSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.GetRule. See the method description for details. @@ -667,11 +679,11 @@ def get_rule( def insert( self, - request: compute.InsertSecurityPolicyRequest = None, + request: Union[compute.InsertSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy_resource: compute.SecurityPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -679,7 +691,7 @@ def insert( the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.InsertSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.Insert. See the method description for details. @@ -753,10 +765,10 @@ def insert( def list( self, - request: compute.ListSecurityPoliciesRequest = None, + request: Union[compute.ListSecurityPoliciesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -764,7 +776,7 @@ def list( the specified project. Args: - request (google.cloud.compute_v1.types.ListSecurityPoliciesRequest): + request (Union[google.cloud.compute_v1.types.ListSecurityPoliciesRequest, dict]): The request object. A request message for SecurityPolicies.List. See the method description for details. 
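Note — widening the request parameter to Union[<Request>, dict] lets callers pass a plain dict, which the client coerces into the proto message. A hedged usage sketch; the project and policy names are hypothetical and Application Default Credentials are assumed:

from google.cloud import compute_v1

client = compute_v1.SecurityPoliciesClient()

# Equivalent to passing compute.GetSecurityPolicyRequest(project=..., security_policy=...).
policy = client.get(request={"project": "example-project", "security_policy": "example-policy"})
print(policy.name)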
@@ -825,10 +837,12 @@ def list( def list_preconfigured_expression_sets( self, - request: compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest = None, + request: Union[ + compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, dict + ] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse: @@ -836,7 +850,7 @@ def list_preconfigured_expression_sets( Application Firewall (WAF) expressions. Args: - request (google.cloud.compute_v1.types.ListPreconfiguredExpressionSetsSecurityPoliciesRequest): + request (Union[google.cloud.compute_v1.types.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, dict]): The request object. A request message for SecurityPolicies.ListPreconfiguredExpressionSets. See the method description for details. @@ -894,12 +908,12 @@ def list_preconfigured_expression_sets( def patch( self, - request: compute.PatchSecurityPolicyRequest = None, + request: Union[compute.PatchSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy: str = None, security_policy_resource: compute.SecurityPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -909,7 +923,7 @@ def patch( like addRule, patchRule, and removeRule instead. Args: - request (google.cloud.compute_v1.types.PatchSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.PatchSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.Patch. See the method description for details. @@ -992,19 +1006,19 @@ def patch( def patch_rule( self, - request: compute.PatchRuleSecurityPolicyRequest = None, + request: Union[compute.PatchRuleSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy: str = None, security_policy_rule_resource: compute.SecurityPolicyRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Patches a rule at the specified priority. Args: - request (google.cloud.compute_v1.types.PatchRuleSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.PatchRuleSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.PatchRule. See the method description for details. @@ -1089,18 +1103,18 @@ def patch_rule( def remove_rule( self, - request: compute.RemoveRuleSecurityPolicyRequest = None, + request: Union[compute.RemoveRuleSecurityPolicyRequest, dict] = None, *, project: str = None, security_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes a rule at the specified priority. Args: - request (google.cloud.compute_v1.types.RemoveRuleSecurityPolicyRequest): + request (Union[google.cloud.compute_v1.types.RemoveRuleSecurityPolicyRequest, dict]): The request object. A request message for SecurityPolicies.RemoveRule. See the method description for details. @@ -1174,6 +1188,19 @@ def remove_rule( # Done; return the response. 
return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/security_policies/pagers.py b/google/cloud/compute_v1/services/security_policies/pagers.py index b6062c6b6..f83c34276 100644 --- a/google/cloud/compute_v1/services/security_policies/pagers.py +++ b/google/cloud/compute_v1/services/security_policies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SecurityPolicyList]: + def pages(self) -> Iterator[compute.SecurityPolicyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.SecurityPolicy]: + def __iter__(self) -> Iterator[compute.SecurityPolicy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/security_policies/transports/base.py b/google/cloud/compute_v1/services/security_policies/transports/base.py index 2e6042ab3..12cbceabd 100644 --- a/google/cloud/compute_v1/services/security_policies/transports/base.py +++ b/google/cloud/compute_v1/services/security_policies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SecurityPoliciesTransport(abc.ABC): """Abstract transport class for SecurityPolicies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": 
scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -192,6 +156,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_rule( self, diff --git a/google/cloud/compute_v1/services/security_policies/transports/rest.py b/google/cloud/compute_v1/services/security_policies/transports/rest.py index 801aa8084..efcf2e025 100644 --- a/google/cloud/compute_v1/services/security_policies/transports/rest.py +++ b/google/cloud/compute_v1/services/security_policies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + SecurityPoliciesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import SecurityPoliciesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class SecurityPoliciesRestTransport(SecurityPoliciesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_rule( + def _add_rule( self, request: compute.AddRuleSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add rule method over HTTP. @@ -112,6 +139,9 @@ def add_rule( SecurityPolicies.AddRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,30 +165,62 @@ def add_rule( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/addRule", + "body": "security_policy_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("security_policy", "securityPolicy"), + ] + + request_kwargs = compute.AddRuleSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SecurityPolicyRule.to_json( - request.security_policy_rule_resource, + compute.SecurityPolicyRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/addRule".format( - host=self._host, - project=request.project, - security_policy=request.security_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddRuleSecurityPolicyRequest.to_json( + compute.AddRuleSecurityPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -169,10 +231,12 @@ def add_rule( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def delete( + def _delete( self, request: compute.DeleteSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -183,6 +247,9 @@ def delete( SecurityPolicies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
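Note — the request body is now rebuilt from transcoded_request["body"] and serialized with the proto-plus to_json helper, which omits fields left at their default values. A hedged, standalone sketch of that serialization step; the field values are hypothetical:

from google.cloud.compute_v1.types import compute

rule = compute.SecurityPolicyRule(priority=1000, action="allow")
body = compute.SecurityPolicyRule.to_json(
    rule,
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
# body is a JSON string, roughly '{"priority": 1000, "action": "allow"}'.
# The same default-dropping behaviour on the query-param side is why the
# required_fields loop above re-inserts required params that were dropped.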
@@ -206,24 +273,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/{security_policy}".format( - host=self._host, - project=request.project, - security_policy=request.security_policy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("security_policy", "securityPolicy"), + ] + + request_kwargs = compute.DeleteSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteSecurityPolicyRequest.to_json( + compute.DeleteSecurityPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteSecurityPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -233,10 +329,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SecurityPolicy: r"""Call the get method over HTTP. @@ -247,6 +345,9 @@ def get( SecurityPolicies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -261,22 +362,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/{security_policy}".format( - host=self._host, - project=request.project, - security_policy=request.security_policy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("security_policy", "securityPolicy"), + ] + + request_kwargs = compute.GetSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetSecurityPolicyRequest.to_json( + compute.GetSecurityPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -288,10 +420,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_rule( + def _get_rule( self, request: compute.GetRuleSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SecurityPolicyRule: r"""Call the get rule method over HTTP. @@ -302,6 +436,9 @@ def get_rule( SecurityPolicies.GetRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -314,24 +451,55 @@ def get_rule( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/getRule".format( - host=self._host, - project=request.project, - security_policy=request.security_policy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/getRule", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("security_policy", "securityPolicy"), + ] + + request_kwargs = compute.GetRuleSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetRuleSecurityPolicyRequest.to_json( + compute.GetRuleSecurityPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.GetRuleSecurityPolicyRequest.priority in request: - query_params["priority"] = request.priority + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -343,10 +511,12 @@ def get_rule( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -357,6 +527,9 @@ def insert( SecurityPolicies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -380,30 +553,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/securityPolicies", + "body": "security_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SecurityPolicy.to_json( - request.security_policy_resource, + compute.SecurityPolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertSecurityPolicyRequest.to_json( + compute.InsertSecurityPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertSecurityPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -414,10 +616,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListSecurityPoliciesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SecurityPolicyList: r"""Call the list method over HTTP. @@ -428,6 +632,9 @@ def list( SecurityPolicies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -436,30 +643,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/securityPolicies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListSecurityPoliciesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListSecurityPoliciesRequest.to_json( + compute.ListSecurityPoliciesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListSecurityPoliciesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListSecurityPoliciesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListSecurityPoliciesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListSecurityPoliciesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListSecurityPoliciesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -471,10 +700,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_preconfigured_expression_sets( + def _list_preconfigured_expression_sets( self, request: compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse: r"""Call the list preconfigured @@ -486,6 +717,9 @@ def list_preconfigured_expression_sets( SecurityPolicies.ListPreconfiguredExpressionSets. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
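Note — the required_fields tables map snake_case names to camelCase because the query params are produced by proto-plus to_json, which emits JSON (camelCase) field names. A hedged sketch with hypothetical values:

import json
from google.cloud.compute_v1.types import compute

request = compute.ListSecurityPoliciesRequest(project="example-project", max_results=5)
query_params = json.loads(
    compute.ListSecurityPoliciesRequest.to_json(
        request,
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
)
# Roughly: {"project": "example-project", "maxResults": 5}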
@@ -494,45 +728,56 @@ def list_preconfigured_expression_sets( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/listPreconfiguredExpressionSets".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/listPreconfiguredExpressionSets", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.to_json( + compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.filter - in request - ): - query_params["filter"] = request.filter - if ( - compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.max_results - in request - ): - query_params["maxResults"] = request.max_results - if ( - compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.order_by - in request - ): - query_params["orderBy"] = request.order_by - if ( - compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.page_token - in request - ): - query_params["pageToken"] = request.page_token - if ( - compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -544,10 +789,12 @@ def list_preconfigured_expression_sets( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -558,6 +805,9 @@ def patch( SecurityPolicies.Patch. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -581,32 +831,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}", + "body": "security_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("security_policy", "securityPolicy"), + ] + + request_kwargs = compute.PatchSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SecurityPolicy.to_json( - request.security_policy_resource, + compute.SecurityPolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/{security_policy}".format( - host=self._host, - project=request.project, - security_policy=request.security_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchSecurityPolicyRequest.to_json( + compute.PatchSecurityPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchSecurityPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -617,10 +895,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def patch_rule( + def _patch_rule( self, request: compute.PatchRuleSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch rule method over HTTP. @@ -631,6 +911,9 @@ def patch_rule( SecurityPolicies.PatchRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -654,32 +937,62 @@ def patch_rule( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/patchRule", + "body": "security_policy_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("security_policy", "securityPolicy"), + ] + + request_kwargs = compute.PatchRuleSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SecurityPolicyRule.to_json( - request.security_policy_rule_resource, + compute.SecurityPolicyRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/patchRule".format( - host=self._host, - project=request.project, - security_policy=request.security_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchRuleSecurityPolicyRequest.to_json( + compute.PatchRuleSecurityPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchRuleSecurityPolicyRequest.priority in request: - query_params["priority"] = request.priority + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -690,10 +1003,12 @@ def patch_rule( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def remove_rule( + def _remove_rule( self, request: compute.RemoveRuleSecurityPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove rule method over HTTP. @@ -704,6 +1019,9 @@ def remove_rule( SecurityPolicies.RemoveRule. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -727,24 +1045,55 @@ def remove_rule( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/removeRule".format( - host=self._host, - project=request.project, - security_policy=request.security_policy, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/removeRule", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("security_policy", "securityPolicy"), + ] + + request_kwargs = compute.RemoveRuleSecurityPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveRuleSecurityPolicyRequest.to_json( + compute.RemoveRuleSecurityPolicyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveRuleSecurityPolicyRequest.priority in request: - query_params["priority"] = request.priority + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
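Every REST stub in this file follows the same two-step shape: `path_template.transcode` matches the request dict against the declared `http_options`, and `rest_helpers.flatten_query_params` flattens whatever is left over into query parameters; methods that declare a `body` option additionally have that field split out, re-wrapped in its message type, and serialized to JSON. A minimal standalone sketch of both cases, assuming only `google-api-core` is installed and using hand-written dicts in place of real `compute` messages:

# Sketch only: the request dicts below are invented stand-ins for what
# <Request>.to_dict(request) produces in the generated stubs.
from google.api_core import path_template, rest_helpers

# Body-less method (remove_rule): leftover fields become query parameters.
http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/removeRule",
    },
]
transcoded = path_template.transcode(
    http_options, project="my-project", security_policy="my-policy", priority=5
)
print(transcoded["method"], transcoded["uri"])
print(rest_helpers.flatten_query_params(transcoded["query_params"]))

# Body-carrying method (patch_rule): the declared body field is split out; the
# generated stub then wraps it back into compute.SecurityPolicyRule and calls
# to_json on it before sending it as the request data.
http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/patchRule",
        "body": "security_policy_rule_resource",
    },
]
transcoded = path_template.transcode(
    http_options,
    project="my-project",
    security_policy="my-policy",
    security_policy_rule_resource={"priority": 1000, "action": "allow"},
)
print(transcoded["body"])
print(transcoded["query_params"])
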
@@ -754,5 +1103,71 @@ def remove_rule( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def add_rule( + self, + ) -> Callable[[compute.AddRuleSecurityPolicyRequest], compute.Operation]: + return self._add_rule + + @property + def delete( + self, + ) -> Callable[[compute.DeleteSecurityPolicyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetSecurityPolicyRequest], compute.SecurityPolicy]: + return self._get + + @property + def get_rule( + self, + ) -> Callable[[compute.GetRuleSecurityPolicyRequest], compute.SecurityPolicyRule]: + return self._get_rule + + @property + def insert( + self, + ) -> Callable[[compute.InsertSecurityPolicyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListSecurityPoliciesRequest], compute.SecurityPolicyList]: + return self._list + + @property + def list_preconfigured_expression_sets( + self, + ) -> Callable[ + [compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest], + compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse, + ]: + return self._list_preconfigured_expression_sets + + @property + def patch( + self, + ) -> Callable[[compute.PatchSecurityPolicyRequest], compute.Operation]: + return self._patch + + @property + def patch_rule( + self, + ) -> Callable[[compute.PatchRuleSecurityPolicyRequest], compute.Operation]: + return self._patch_rule + + @property + def remove_rule( + self, + ) -> Callable[[compute.RemoveRuleSecurityPolicyRequest], compute.Operation]: + return self._remove_rule + + def close(self): + self._session.close() + __all__ = ("SecurityPoliciesRestTransport",) diff --git a/google/cloud/compute_v1/services/service_attachments/client.py b/google/cloud/compute_v1/services/service_attachments/client.py index 30cf389f9..2f8914947 100644 --- a/google/cloud/compute_v1/services/service_attachments/client.py +++ b/google/cloud/compute_v1/services/service_attachments/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.service_attachments import pagers from google.cloud.compute_v1.types import compute from .transports.base import ServiceAttachmentsTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,14 +339,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListServiceAttachmentsRequest = None, + request: Union[compute.AggregatedListServiceAttachmentsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -344,7 +356,7 @@ def aggregated_list( specified project. Args: - request (google.cloud.compute_v1.types.AggregatedListServiceAttachmentsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListServiceAttachmentsRequest, dict]): The request object. A request message for ServiceAttachments.AggregatedList. See the method description for details. 
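The mutual-TLS change near the top of this client recurs in every service client touched by this change: `distutils.util.strtobool` is gone, and `GOOGLE_API_USE_CLIENT_CERTIFICATE` is now validated against the literal strings "true" and "false", so values such as "1" or "yes" raise instead of being silently coerced. A standalone sketch of the new behaviour (the helper name is illustrative, not part of the library):

import os

def _use_client_cert_from_env() -> bool:
    # Hypothetical helper mirroring the inline check in the generated clients.
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
            "must be either `true` or `false`"
        )
    return value == "true"
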
@@ -409,12 +421,12 @@ def aggregated_list( def delete( self, - request: compute.DeleteServiceAttachmentRequest = None, + request: Union[compute.DeleteServiceAttachmentRequest, dict] = None, *, project: str = None, region: str = None, service_attachment: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -422,7 +434,7 @@ def delete( scope Args: - request (google.cloud.compute_v1.types.DeleteServiceAttachmentRequest): + request (Union[google.cloud.compute_v1.types.DeleteServiceAttachmentRequest, dict]): The request object. A request message for ServiceAttachments.Delete. See the method description for details. @@ -505,12 +517,12 @@ def delete( def get( self, - request: compute.GetServiceAttachmentRequest = None, + request: Union[compute.GetServiceAttachmentRequest, dict] = None, *, project: str = None, region: str = None, service_attachment: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ServiceAttachment: @@ -518,7 +530,7 @@ def get( the given scope. Args: - request (google.cloud.compute_v1.types.GetServiceAttachmentRequest): + request (Union[google.cloud.compute_v1.types.GetServiceAttachmentRequest, dict]): The request object. A request message for ServiceAttachments.Get. See the method description for details. @@ -595,12 +607,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicyServiceAttachmentRequest = None, + request: Union[compute.GetIamPolicyServiceAttachmentRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -608,7 +620,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicyServiceAttachmentRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicyServiceAttachmentRequest, dict]): The request object. A request message for ServiceAttachments.GetIamPolicy. See the method description for details. @@ -717,12 +729,12 @@ def get_iam_policy( def insert( self, - request: compute.InsertServiceAttachmentRequest = None, + request: Union[compute.InsertServiceAttachmentRequest, dict] = None, *, project: str = None, region: str = None, service_attachment_resource: compute.ServiceAttachment = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -731,7 +743,7 @@ def insert( included in the request. Args: - request (google.cloud.compute_v1.types.InsertServiceAttachmentRequest): + request (Union[google.cloud.compute_v1.types.InsertServiceAttachmentRequest, dict]): The request object. A request message for ServiceAttachments.Insert. See the method description for details. 
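Widening the request parameter to `Union[<Request>, dict]` means callers may now pass a plain mapping and let the client coerce it into the protobuf request. A hedged usage example, with placeholder project, region, and resource names:

from google.cloud import compute_v1

client = compute_v1.ServiceAttachmentsClient()

# The dict form is coerced into a GetServiceAttachmentRequest by the client;
# the explicit compute_v1.GetServiceAttachmentRequest(...) form still works.
attachment = client.get(
    request={
        "project": "my-project",
        "region": "us-central1",
        "service_attachment": "my-attachment",
    }
)
print(attachment.name)
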
@@ -812,11 +824,11 @@ def insert( def list( self, - request: compute.ListServiceAttachmentsRequest = None, + request: Union[compute.ListServiceAttachmentsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -824,7 +836,7 @@ def list( given scope. Args: - request (google.cloud.compute_v1.types.ListServiceAttachmentsRequest): + request (Union[google.cloud.compute_v1.types.ListServiceAttachmentsRequest, dict]): The request object. A request message for ServiceAttachments.List. See the method description for details. @@ -892,13 +904,13 @@ def list( def patch( self, - request: compute.PatchServiceAttachmentRequest = None, + request: Union[compute.PatchServiceAttachmentRequest, dict] = None, *, project: str = None, region: str = None, service_attachment: str = None, service_attachment_resource: compute.ServiceAttachment = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -908,7 +920,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchServiceAttachmentRequest): + request (Union[google.cloud.compute_v1.types.PatchServiceAttachmentRequest, dict]): The request object. A request message for ServiceAttachments.Patch. See the method description for details. @@ -1004,13 +1016,13 @@ def patch( def set_iam_policy( self, - request: compute.SetIamPolicyServiceAttachmentRequest = None, + request: Union[compute.SetIamPolicyServiceAttachmentRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_policy_request_resource: compute.RegionSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1018,7 +1030,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicyServiceAttachmentRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicyServiceAttachmentRequest, dict]): The request object. A request message for ServiceAttachments.SetIamPolicy. See the method description for details. @@ -1138,13 +1150,13 @@ def set_iam_policy( def test_iam_permissions( self, - request: compute.TestIamPermissionsServiceAttachmentRequest = None, + request: Union[compute.TestIamPermissionsServiceAttachmentRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1152,7 +1164,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsServiceAttachmentRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsServiceAttachmentRequest, dict]): The request object. A request message for ServiceAttachments.TestIamPermissions. See the method description for details. @@ -1231,6 +1243,19 @@ def test_iam_permissions( # Done; return the response. 
return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/service_attachments/pagers.py b/google/cloud/compute_v1/services/service_attachments/pagers.py index 4b1fc4f22..49f30579b 100644 --- a/google/cloud/compute_v1/services/service_attachments/pagers.py +++ b/google/cloud/compute_v1/services/service_attachments/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ServiceAttachmentAggregatedList]: + def pages(self) -> Iterator[compute.ServiceAttachmentAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.ServiceAttachmentsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.ServiceAttachmentsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ServiceAttachmentList]: + def pages(self) -> Iterator[compute.ServiceAttachmentList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.ServiceAttachment]: + def __iter__(self) -> Iterator[compute.ServiceAttachment]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/service_attachments/transports/base.py b/google/cloud/compute_v1/services/service_attachments/transports/base.py index 609ecbbbf..2a80d5f46 100644 --- a/google/cloud/compute_v1/services/service_attachments/transports/base.py +++ b/google/cloud/compute_v1/services/service_attachments/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - 
rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ServiceAttachmentsTransport(abc.ABC): """Abstract transport class for ServiceAttachments.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -189,6 +153,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/service_attachments/transports/rest.py b/google/cloud/compute_v1/services/service_attachments/transports/rest.py index 36a5e93a0..31348fdd3 100644 --- a/google/cloud/compute_v1/services/service_attachments/transports/rest.py +++ b/google/cloud/compute_v1/services/service_attachments/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + ServiceAttachmentsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import ServiceAttachmentsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ServiceAttachmentsRestTransport(ServiceAttachmentsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
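The new `url_scheme` constructor argument is threaded into the transport's signature here; as the inline comments in the request stubs note, the URLs in this revision are still assembled with a literal `https://` prefix. A sketch of constructing the REST transport directly against a hypothetical local endpoint, using anonymous credentials to avoid the default credential lookup:

from google.auth.credentials import AnonymousCredentials
from google.cloud.compute_v1.services.service_attachments.transports.rest import (
    ServiceAttachmentsRestTransport,
)

# "localhost:8080" and the http scheme are assumptions for a test setup.
transport = ServiceAttachmentsRestTransport(
    host="localhost:8080",
    credentials=AnonymousCredentials(),
    url_scheme="http",
)
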
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListServiceAttachmentsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ServiceAttachmentAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( ServiceAttachments.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -122,38 +152,56 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/serviceAttachments".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/serviceAttachments", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListServiceAttachmentsRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListServiceAttachmentsRequest.to_json( + compute.AggregatedListServiceAttachmentsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListServiceAttachmentsRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.AggregatedListServiceAttachmentsRequest.include_all_scopes - in request - ): - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListServiceAttachmentsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListServiceAttachmentsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListServiceAttachmentsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListServiceAttachmentsRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -165,10 +213,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteServiceAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -179,6 +229,9 @@ def delete( ServiceAttachments.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -202,25 +255,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( - host=self._host, - project=request.project, - region=request.region, - service_attachment=request.service_attachment, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("service_attachment", "serviceAttachment"), + ] + + request_kwargs = compute.DeleteServiceAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteServiceAttachmentRequest.to_json( + compute.DeleteServiceAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteServiceAttachmentRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -230,10 +314,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetServiceAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ServiceAttachment: r"""Call the get method over HTTP. @@ -244,6 +330,9 @@ def get( ServiceAttachments.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -261,23 +350,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( - host=self._host, - project=request.project, - region=request.region, - service_attachment=request.service_attachment, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("service_attachment", "serviceAttachment"), + ] + + request_kwargs = compute.GetServiceAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetServiceAttachmentRequest.to_json( + compute.GetServiceAttachmentRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -289,10 +409,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicyServiceAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -303,6 +425,9 @@ def get_iam_policy( ServiceAttachments.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -351,30 +476,56 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicyServiceAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicyServiceAttachmentRequest.to_json( + compute.GetIamPolicyServiceAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicyServiceAttachmentRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -384,10 +535,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertServiceAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -398,6 +551,9 @@ def insert( ServiceAttachments.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -421,30 +577,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments", + "body": "service_attachment_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertServiceAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ServiceAttachment.to_json( - request.service_attachment_resource, + compute.ServiceAttachment(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertServiceAttachmentRequest.to_json( + compute.InsertServiceAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertServiceAttachmentRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -455,10 +643,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListServiceAttachmentsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ServiceAttachmentList: r"""Call the list method over HTTP. @@ -469,6 +659,9 @@ def list( ServiceAttachments.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -477,30 +670,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListServiceAttachmentsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListServiceAttachmentsRequest.to_json( + compute.ListServiceAttachmentsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListServiceAttachmentsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListServiceAttachmentsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListServiceAttachmentsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListServiceAttachmentsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListServiceAttachmentsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -512,10 +730,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchServiceAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -526,6 +746,9 @@ def patch( ServiceAttachments.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -549,33 +772,63 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}", + "body": "service_attachment_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("service_attachment", "serviceAttachment"), + ] + + request_kwargs = compute.PatchServiceAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.ServiceAttachment.to_json( - request.service_attachment_resource, + compute.ServiceAttachment(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}".format( - host=self._host, - project=request.project, - region=request.region, - service_attachment=request.service_attachment, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchServiceAttachmentRequest.to_json( + compute.PatchServiceAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchServiceAttachmentRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -586,10 +839,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicyServiceAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -600,6 +855,9 @@ def set_iam_policy( ServiceAttachments.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -648,31 +906,63 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicyServiceAttachmentRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetPolicyRequest.to_json( - request.region_set_policy_request_resource, + compute.RegionSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicyServiceAttachmentRequest.to_json( + compute.SetIamPolicyServiceAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -683,10 +973,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsServiceAttachmentRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -697,6 +989,9 @@ def test_iam_permissions( ServiceAttachments.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -705,31 +1000,65 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsServiceAttachmentRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsServiceAttachmentRequest.to_json( + compute.TestIamPermissionsServiceAttachmentRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -742,5 +1071,70 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListServiceAttachmentsRequest], + compute.ServiceAttachmentAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteServiceAttachmentRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetServiceAttachmentRequest], compute.ServiceAttachment]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyServiceAttachmentRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert( + self, + ) -> Callable[[compute.InsertServiceAttachmentRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListServiceAttachmentsRequest], compute.ServiceAttachmentList + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchServiceAttachmentRequest], compute.Operation]: + return self._patch + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyServiceAttachmentRequest], compute.Policy]: + return self._set_iam_policy + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsServiceAttachmentRequest], + compute.TestPermissionsResponse, + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("ServiceAttachmentsRestTransport",) diff --git a/google/cloud/compute_v1/services/snapshots/client.py b/google/cloud/compute_v1/services/snapshots/client.py index ff9d998df..1d1e1fa1c 100644 --- a/google/cloud/compute_v1/services/snapshots/client.py +++ b/google/cloud/compute_v1/services/snapshots/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.snapshots import pagers from google.cloud.compute_v1.types import compute from .transports.base import SnapshotsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,15 +335,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteSnapshotRequest = None, + request: Union[compute.DeleteSnapshotRequest, dict] = None, *, project: str = None, snapshot: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -345,7 +357,7 @@ def delete( Deleting snapshots. Args: - request (google.cloud.compute_v1.types.DeleteSnapshotRequest): + request (Union[google.cloud.compute_v1.types.DeleteSnapshotRequest, dict]): The request object. A request message for Snapshots.Delete. See the method description for details. @@ -421,11 +433,11 @@ def delete( def get( self, - request: compute.GetSnapshotRequest = None, + request: Union[compute.GetSnapshotRequest, dict] = None, *, project: str = None, snapshot: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Snapshot: @@ -433,7 +445,7 @@ def get( of available snapshots by making a list() request. Args: - request (google.cloud.compute_v1.types.GetSnapshotRequest): + request (Union[google.cloud.compute_v1.types.GetSnapshotRequest, dict]): The request object. A request message for Snapshots.Get. See the method description for details. 
project (str): @@ -498,11 +510,11 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicySnapshotRequest = None, + request: Union[compute.GetIamPolicySnapshotRequest, dict] = None, *, project: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -510,7 +522,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicySnapshotRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicySnapshotRequest, dict]): The request object. A request message for Snapshots.GetIamPolicy. See the method description for details. @@ -610,10 +622,10 @@ def get_iam_policy( def list( self, - request: compute.ListSnapshotsRequest = None, + request: Union[compute.ListSnapshotsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -621,7 +633,7 @@ def list( within the specified project. Args: - request (google.cloud.compute_v1.types.ListSnapshotsRequest): + request (Union[google.cloud.compute_v1.types.ListSnapshotsRequest, dict]): The request object. A request message for Snapshots.List. See the method description for details. project (str): @@ -683,12 +695,12 @@ def list( def set_iam_policy( self, - request: compute.SetIamPolicySnapshotRequest = None, + request: Union[compute.SetIamPolicySnapshotRequest, dict] = None, *, project: str = None, resource: str = None, global_set_policy_request_resource: compute.GlobalSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -696,7 +708,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicySnapshotRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicySnapshotRequest, dict]): The request object. A request message for Snapshots.SetIamPolicy. See the method description for details. @@ -807,12 +819,12 @@ def set_iam_policy( def set_labels( self, - request: compute.SetLabelsSnapshotRequest = None, + request: Union[compute.SetLabelsSnapshotRequest, dict] = None, *, project: str = None, resource: str = None, global_set_labels_request_resource: compute.GlobalSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -820,7 +832,7 @@ def set_labels( labels, read the Labeling Resources documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsSnapshotRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsSnapshotRequest, dict]): The request object. A request message for Snapshots.SetLabels. See the method description for details. 
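Note on the signature changes above: every client method now accepts either the generated request message or a plain dict with the same field names (`Union[..., dict]`), and the `retry` parameter is typed with the `OptionalRetry` alias so `gapic_v1.method.DEFAULT` remains a valid default. A minimal sketch of both call styles, assuming a placeholder project and snapshot name:

from google.cloud import compute_v1

client = compute_v1.SnapshotsClient()

# A typed request message...
request = compute_v1.GetSnapshotRequest(project="my-project", snapshot="my-snapshot")
snapshot = client.get(request=request)

# ...or an equivalent plain dict, which the client coerces into the message.
snapshot = client.get(request={"project": "my-project", "snapshot": "my-snapshot"})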
@@ -907,12 +919,12 @@ def set_labels( def test_iam_permissions( self, - request: compute.TestIamPermissionsSnapshotRequest = None, + request: Union[compute.TestIamPermissionsSnapshotRequest, dict] = None, *, project: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -920,7 +932,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsSnapshotRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsSnapshotRequest, dict]): The request object. A request message for Snapshots.TestIamPermissions. See the method description for details. @@ -990,6 +1002,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/snapshots/pagers.py b/google/cloud/compute_v1/services/snapshots/pagers.py index 8d9aa9333..08661431a 100644 --- a/google/cloud/compute_v1/services/snapshots/pagers.py +++ b/google/cloud/compute_v1/services/snapshots/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SnapshotList]: + def pages(self) -> Iterator[compute.SnapshotList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Snapshot]: + def __iter__(self) -> Iterator[compute.Snapshot]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/snapshots/transports/base.py b/google/cloud/compute_v1/services/snapshots/transports/base.py index e34d66a84..84b0b0bf4 100644 --- a/google/cloud/compute_v1/services/snapshots/transports/base.py +++ b/google/cloud/compute_v1/services/snapshots/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO 
= gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SnapshotsTransport(abc.ABC): """Abstract transport class for Snapshots.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -183,6 +147,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/snapshots/transports/rest.py b/google/cloud/compute_v1/services/snapshots/transports/rest.py index 339e6f105..60564b6a9 100644 --- a/google/cloud/compute_v1/services/snapshots/transports/rest.py +++ b/google/cloud/compute_v1/services/snapshots/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import SnapshotsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import SnapshotsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class SnapshotsRestTransport(SnapshotsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteSnapshotRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +136,9 @@ def delete( Snapshots.Delete. 
See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -135,22 +162,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/snapshots/{snapshot}".format( - host=self._host, project=request.project, snapshot=request.snapshot, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/snapshots/{snapshot}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("snapshot", "snapshot"), + ] + + request_kwargs = compute.DeleteSnapshotRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteSnapshotRequest.to_json( + compute.DeleteSnapshotRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteSnapshotRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +218,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetSnapshotRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Snapshot: r"""Call the get method over HTTP. @@ -173,6 +233,9 @@ def get( The request object. A request message for Snapshots.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
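The hard-coded URL formatting is replaced by gRPC transcoding via google.api_core.path_template.transcode, driven by the http_options table in each handler (as in _delete above). A minimal sketch of what transcode is expected to return, using placeholder field values:

from google.api_core import path_template

http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/projects/{project}/global/snapshots/{snapshot}",
    },
]

transcoded = path_template.transcode(
    http_options, project="my-project", snapshot="my-snapshot", request_id="1234"
)

# Fields named in the URI template are substituted into the path; anything
# left over (request_id here) is returned under "query_params".
print(transcoded["method"])        # "delete"
print(transcoded["uri"])           # "/compute/v1/projects/my-project/global/snapshots/my-snapshot"
print(transcoded["query_params"])  # {"request_id": "1234"}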
@@ -186,20 +249,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/snapshots/{snapshot}".format( - host=self._host, project=request.project, snapshot=request.snapshot, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/snapshots/{snapshot}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("snapshot", "snapshot"), + ] + + request_kwargs = compute.GetSnapshotRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetSnapshotRequest.to_json( + compute.GetSnapshotRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -209,10 +305,12 @@ def get( # Return the response return compute.Snapshot.from_json(response.content, ignore_unknown_fields=True) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicySnapshotRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -223,6 +321,9 @@ def get_iam_policy( Snapshots.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
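Each handler also back-fills required fields into the query parameters, because to_json(..., including_default_value_fields=False) silently drops a required field that still holds its proto default. A self-contained toy version of that loop, with made-up values:

# What transcode produced (snake_case) vs. what to_json kept (camelCase).
orig_query_params = {"project": "", "snapshot": "my-snapshot"}
query_params = {"snapshot": "my-snapshot"}  # "project" was dropped: default value

required_fields = [
    # (snake_case_name, camel_case_name)
    ("project", "project"),
    ("snapshot", "snapshot"),
]

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

print(query_params)  # {'snapshot': 'my-snapshot', 'project': ''}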
@@ -271,27 +372,53 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/snapshots/{resource}/getIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/snapshots/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicySnapshotRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicySnapshotRequest.to_json( + compute.GetIamPolicySnapshotRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicySnapshotRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -301,10 +428,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListSnapshotsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SnapshotList: r"""Call the list method over HTTP. @@ -314,6 +443,9 @@ def list( The request object. A request message for Snapshots.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
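Rather than calling self._session.get/post/delete directly, each handler now dispatches on the transcoded HTTP verb with getattr(self._session, method)(...), so one code path serves every verb and the new timeout argument is forwarded to the underlying session. A small sketch of the dispatch against a plain requests.Session with a placeholder URL (the real transport uses an AuthorizedSession, so credentials are omitted here):

import requests

session = requests.Session()
method = "get"  # comes from transcoded_request["method"] in the handlers above

response = getattr(session, method)(
    "https://compute.googleapis.com/compute/v1/projects/my-project/global/snapshots",
    timeout=30.0,
    headers={"Content-Type": "application/json"},
    params=[("maxResults", "5")],
)
print(response.status_code)  # likely 401 without credentials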
@@ -324,30 +456,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/snapshots".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/snapshots", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListSnapshotsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListSnapshotsRequest.to_json( + compute.ListSnapshotsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListSnapshotsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListSnapshotsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListSnapshotsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListSnapshotsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListSnapshotsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -359,10 +513,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicySnapshotRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -373,6 +529,9 @@ def set_iam_policy( Snapshots.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
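At the client level, list results still come back as a pager (now typed with Iterator rather than Iterable, per the pagers.py change earlier in this diff); iterating it re-issues the request with each next_page_token. A usage sketch with a placeholder project:

from google.cloud import compute_v1

client = compute_v1.SnapshotsClient()

# Item-by-item iteration; paging is handled by the pager.
for snapshot in client.list(project="my-project"):
    print(snapshot.name)

# Or page by page, mirroring the `pages` property shown earlier.
for page in client.list(project="my-project").pages:
    print(len(page.items))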
@@ -421,28 +580,60 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/snapshots/{resource}/setIamPolicy", + "body": "global_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicySnapshotRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetPolicyRequest.to_json( - request.global_set_policy_request_resource, + compute.GlobalSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/snapshots/{resource}/setIamPolicy".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicySnapshotRequest.to_json( + compute.SetIamPolicySnapshotRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -453,10 +644,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def set_labels( + def _set_labels( self, request: compute.SetLabelsSnapshotRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -467,6 +660,9 @@ def set_labels( Snapshots.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
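For the POST handlers, the request body is now rebuilt from the transcoded "body" field and serialized with the proto-plus to_json helper, with default-valued fields omitted (the same omission that motivates the required-field back-fill). A small sketch using TestPermissionsRequest; the permission string is illustrative:

from google.cloud.compute_v1.types import compute

msg = compute.TestPermissionsRequest(permissions=["compute.snapshots.get"])
body = compute.TestPermissionsRequest.to_json(
    msg,
    including_default_value_fields=False,
    use_integers_for_enums=False,
)
print(body)  # roughly '{"permissions": ["compute.snapshots.get"]}'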
@@ -490,28 +686,60 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/snapshots/{resource}/setLabels", + "body": "global_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetLabelsSnapshotRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.GlobalSetLabelsRequest.to_json( - request.global_set_labels_request_resource, + compute.GlobalSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/snapshots/{resource}/setLabels".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsSnapshotRequest.to_json( + compute.SetLabelsSnapshotRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -522,10 +750,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsSnapshotRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -536,6 +766,9 @@ def test_iam_permissions( Snapshots.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
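Query parameters are no longer handed to requests as a raw dict; they pass through google.api_core.rest_helpers.flatten_query_params first, which expands nested values into flat name/value pairs. A minimal sketch (the exact pair ordering and representation are not significant):

from google.api_core import rest_helpers

params = rest_helpers.flatten_query_params(
    {"maxResults": 10, "returnPartialSuccess": True}
)
# A list of (name, value) pairs, e.g. [("maxResults", 10), ("returnPartialSuccess", True)]
print(params)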
@@ -544,28 +777,62 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/snapshots/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsSnapshotRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/snapshots/{resource}/testIamPermissions".format( - host=self._host, project=request.project, resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsSnapshotRequest.to_json( + compute.TestIamPermissionsSnapshotRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -578,5 +845,46 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def delete(self) -> Callable[[compute.DeleteSnapshotRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetSnapshotRequest], compute.Snapshot]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicySnapshotRequest], compute.Policy]: + return self._get_iam_policy + + @property + def list(self) -> Callable[[compute.ListSnapshotsRequest], compute.SnapshotList]: + return self._list + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicySnapshotRequest], compute.Policy]: + return self._set_iam_policy + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsSnapshotRequest], compute.Operation]: + return self._set_labels + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsSnapshotRequest], compute.TestPermissionsResponse + ]: 
+ return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("SnapshotsRestTransport",) diff --git a/google/cloud/compute_v1/services/ssl_certificates/client.py b/google/cloud/compute_v1/services/ssl_certificates/client.py index 05cb05c94..d38d274e1 100644 --- a/google/cloud/compute_v1/services/ssl_certificates/client.py +++ b/google/cloud/compute_v1/services/ssl_certificates/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.ssl_certificates import pagers from google.cloud.compute_v1.types import compute from .transports.base import SslCertificatesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,14 +337,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListSslCertificatesRequest = None, + request: Union[compute.AggregatedListSslCertificatesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -341,7 +353,7 @@ def aggregated_list( regional and global, available to the specified project. Args: - request (google.cloud.compute_v1.types.AggregatedListSslCertificatesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListSslCertificatesRequest, dict]): The request object. A request message for SslCertificates.AggregatedList. See the method description for details. 
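As in the other clients in this change, the distutils-based client-certificate switch is replaced by an explicit check: distutils.util.strtobool also accepted values such as "1", "yes" or "t", while the new code only recognises the literal strings "true" and "false" and raises ValueError for anything else. A behavioural sketch:

import os

def _use_client_cert() -> bool:
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        # Previously "1"/"yes"/"t" were coerced by strtobool; they now fail fast.
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return value == "true"

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
print(_use_client_cert())  # True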
@@ -404,18 +416,18 @@ def aggregated_list( def delete( self, - request: compute.DeleteSslCertificateRequest = None, + request: Union[compute.DeleteSslCertificateRequest, dict] = None, *, project: str = None, ssl_certificate: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified SslCertificate resource. Args: - request (google.cloud.compute_v1.types.DeleteSslCertificateRequest): + request (Union[google.cloud.compute_v1.types.DeleteSslCertificateRequest, dict]): The request object. A request message for SslCertificates.Delete. See the method description for details. @@ -491,11 +503,11 @@ def delete( def get( self, - request: compute.GetSslCertificateRequest = None, + request: Union[compute.GetSslCertificateRequest, dict] = None, *, project: str = None, ssl_certificate: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslCertificate: @@ -504,7 +516,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetSslCertificateRequest): + request (Union[google.cloud.compute_v1.types.GetSslCertificateRequest, dict]): The request object. A request message for SslCertificates.Get. See the method description for details. @@ -581,11 +593,11 @@ def get( def insert( self, - request: compute.InsertSslCertificateRequest = None, + request: Union[compute.InsertSslCertificateRequest, dict] = None, *, project: str = None, ssl_certificate_resource: compute.SslCertificate = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -593,7 +605,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertSslCertificateRequest): + request (Union[google.cloud.compute_v1.types.InsertSslCertificateRequest, dict]): The request object. A request message for SslCertificates.Insert. See the method description for details. @@ -667,10 +679,10 @@ def insert( def list( self, - request: compute.ListSslCertificatesRequest = None, + request: Union[compute.ListSslCertificatesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -678,7 +690,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListSslCertificatesRequest): + request (Union[google.cloud.compute_v1.types.ListSslCertificatesRequest, dict]): The request object. A request message for SslCertificates.List. See the method description for details. @@ -739,6 +751,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/ssl_certificates/pagers.py b/google/cloud/compute_v1/services/ssl_certificates/pagers.py index ee47245a8..084f15592 100644 --- a/google/cloud/compute_v1/services/ssl_certificates/pagers.py +++ b/google/cloud/compute_v1/services/ssl_certificates/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SslCertificateAggregatedList]: + def pages(self) -> Iterator[compute.SslCertificateAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.SslCertificatesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.SslCertificatesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SslCertificateList]: + def pages(self) -> Iterator[compute.SslCertificateList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.SslCertificate]: + def __iter__(self) -> Iterator[compute.SslCertificate]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/ssl_certificates/transports/base.py b/google/cloud/compute_v1/services/ssl_certificates/transports/base.py index 0b9d8a4ea..870e04778 100644 --- a/google/cloud/compute_v1/services/ssl_certificates/transports/base.py +++ b/google/cloud/compute_v1/services/ssl_certificates/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except 
pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SslCertificatesTransport(abc.ABC): """Abstract transport class for SslCertificates.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py b/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py index 8988cc244..876b34288 100644 --- a/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py +++ b/google/cloud/compute_v1/services/ssl_certificates/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + SslCertificatesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import SslCertificatesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class SslCertificatesRestTransport(SslCertificatesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListSslCertificatesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslCertificateAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( SslCertificates.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
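Each client in this change also gains __enter__/__exit__ (see the SnapshotsClient and SslCertificatesClient hunks above) and each transport gains close(); on the REST transports close() shuts down the AuthorizedSession. A usage sketch with a placeholder project, appropriate only when the transport is not shared between clients:

from google.cloud import compute_v1

with compute_v1.SslCertificatesClient() as client:
    for certificate in client.list(project="my-project"):
        print(certificate.name)
# Leaving the block calls client.transport.close(), closing the HTTP session.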
@@ -120,35 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/sslCertificates".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/sslCertificates", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListSslCertificatesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListSslCertificatesRequest.to_json( + compute.AggregatedListSslCertificatesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListSslCertificatesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListSslCertificatesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListSslCertificatesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListSslCertificatesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListSslCertificatesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListSslCertificatesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteSslCertificateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( SslCertificates.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,24 +251,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}".format( - host=self._host, - project=request.project, - ssl_certificate=request.ssl_certificate, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("ssl_certificate", "sslCertificate"), + ] + + request_kwargs = compute.DeleteSslCertificateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteSslCertificateRequest.to_json( + compute.DeleteSslCertificateRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteSslCertificateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -224,10 +307,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetSslCertificateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslCertificate: r"""Call the get method over HTTP. @@ -238,6 +323,9 @@ def get( SslCertificates.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
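The new `required_fields` loop in these handlers guards against required query parameters being dropped when a field equals its proto default, because the `to_json` call above is made with `including_default_value_fields=False`. A standalone sketch of that backfill with a made-up field name:

# Hypothetical example: a required query param whose value equals the proto default
# ("" here) is omitted by to_json(..., including_default_value_fields=False).
required_fields = [("some_required_field", "someRequiredField")]
orig_query_params = {"some_required_field": ""}   # from transcoded_request["query_params"]
query_params = {}                                  # after the to_json round trip

for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

assert query_params == {"someRequiredField": ""}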
@@ -262,22 +350,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}".format( - host=self._host, - project=request.project, - ssl_certificate=request.ssl_certificate, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("ssl_certificate", "sslCertificate"), + ] + + request_kwargs = compute.GetSslCertificateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetSslCertificateRequest.to_json( + compute.GetSslCertificateRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -289,10 +408,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertSslCertificateRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -303,6 +424,9 @@ def insert( SslCertificates.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
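`rest_helpers.flatten_query_params` is what turns the (possibly nested) query-param dict into the flat key/value pairs that `requests` expects. A small sketch, assuming it behaves as in current google-api-core releases; the parameter values are placeholders:

from google.api_core import rest_helpers

params = rest_helpers.flatten_query_params({"maxResults": 10, "filter": "name=web-*"})
# Roughly: [("maxResults", 10), ("filter", "name=web-*")]; nested dicts become dotted keys.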
@@ -326,30 +450,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/sslCertificates", + "body": "ssl_certificate_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertSslCertificateRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SslCertificate.to_json( - request.ssl_certificate_resource, + compute.SslCertificate(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslCertificates".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertSslCertificateRequest.to_json( + compute.InsertSslCertificateRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertSslCertificateRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -360,10 +513,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListSslCertificatesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslCertificateList: r"""Call the list method over HTTP. @@ -374,6 +529,9 @@ def list( SslCertificates.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -384,30 +542,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslCertificates".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/sslCertificates", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListSslCertificatesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListSslCertificatesRequest.to_json( + compute.ListSslCertificatesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListSslCertificatesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListSslCertificatesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListSslCertificatesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListSslCertificatesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListSslCertificatesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
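All of these handlers now delegate URL construction to `path_template.transcode`, matching the request fields against the `http_options` rules instead of hand-formatting URLs. A minimal sketch of that step in isolation, with a placeholder project id and page size:

from google.api_core import path_template

http_options = [
    {"method": "get", "uri": "/compute/v1/projects/{project}/global/sslCertificates"},
]
request_kwargs = {"project": "my-project", "max_results": 5}  # hypothetical values

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       -> "get"
# transcoded["uri"]          -> "/compute/v1/projects/my-project/global/sslCertificates"
# transcoded["query_params"] -> {"max_results": 5}  (fields not consumed by the path)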
@@ -419,5 +599,41 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListSslCertificatesRequest], + compute.SslCertificateAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteSslCertificateRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetSslCertificateRequest], compute.SslCertificate]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertSslCertificateRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListSslCertificatesRequest], compute.SslCertificateList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("SslCertificatesRestTransport",) diff --git a/google/cloud/compute_v1/services/ssl_policies/client.py b/google/cloud/compute_v1/services/ssl_policies/client.py index 05d3f174d..00051a104 100644 --- a/google/cloud/compute_v1/services/ssl_policies/client.py +++ b/google/cloud/compute_v1/services/ssl_policies/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.ssl_policies import pagers from google.cloud.compute_v1.types import compute from .transports.base import SslPoliciesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,15 +335,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteSslPolicyRequest = None, + request: Union[compute.DeleteSslPolicyRequest, dict] = None, *, project: str = None, ssl_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -341,7 +353,7 @@ def delete( TargetHttpsProxy or TargetSslProxy resources. Args: - request (google.cloud.compute_v1.types.DeleteSslPolicyRequest): + request (Union[google.cloud.compute_v1.types.DeleteSslPolicyRequest, dict]): The request object. A request message for SslPolicies.Delete. See the method description for details. @@ -418,11 +430,11 @@ def delete( def get( self, - request: compute.GetSslPolicyRequest = None, + request: Union[compute.GetSslPolicyRequest, dict] = None, *, project: str = None, ssl_policy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslPolicy: @@ -430,7 +442,7 @@ def get( specified policy. Args: - request (google.cloud.compute_v1.types.GetSslPolicyRequest): + request (Union[google.cloud.compute_v1.types.GetSslPolicyRequest, dict]): The request object. A request message for SslPolicies.Get. See the method description for details. project (str): @@ -497,11 +509,11 @@ def get( def insert( self, - request: compute.InsertSslPolicyRequest = None, + request: Union[compute.InsertSslPolicyRequest, dict] = None, *, project: str = None, ssl_policy_resource: compute.SslPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -510,7 +522,7 @@ def insert( request. Args: - request (google.cloud.compute_v1.types.InsertSslPolicyRequest): + request (Union[google.cloud.compute_v1.types.InsertSslPolicyRequest, dict]): The request object. A request message for SslPolicies.Insert. See the method description for details. @@ -584,10 +596,10 @@ def insert( def list( self, - request: compute.ListSslPoliciesRequest = None, + request: Union[compute.ListSslPoliciesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -595,7 +607,7 @@ def list( for the specified project. Args: - request (google.cloud.compute_v1.types.ListSslPoliciesRequest): + request (Union[google.cloud.compute_v1.types.ListSslPoliciesRequest, dict]): The request object. A request message for SslPolicies.List. See the method description for details. 
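With the client method signatures widened to `Union[..., dict]` in these hunks, callers may pass a plain dict instead of constructing the request message themselves. A usage sketch; the project and policy names are placeholders:

from google.cloud import compute_v1

client = compute_v1.SslPoliciesClient()

# These two calls are equivalent after this change.
policy = client.get(request={"project": "my-project", "ssl_policy": "my-policy"})
policy = client.get(
    request=compute_v1.GetSslPolicyRequest(project="my-project", ssl_policy="my-policy")
)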
@@ -656,10 +668,10 @@ def list( def list_available_features( self, - request: compute.ListAvailableFeaturesSslPoliciesRequest = None, + request: Union[compute.ListAvailableFeaturesSslPoliciesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslPoliciesListAvailableFeaturesResponse: @@ -667,7 +679,7 @@ def list_available_features( policy when using custom profile. Args: - request (google.cloud.compute_v1.types.ListAvailableFeaturesSslPoliciesRequest): + request (Union[google.cloud.compute_v1.types.ListAvailableFeaturesSslPoliciesRequest, dict]): The request object. A request message for SslPolicies.ListAvailableFeatures. See the method description for details. @@ -719,12 +731,12 @@ def list_available_features( def patch( self, - request: compute.PatchSslPolicyRequest = None, + request: Union[compute.PatchSslPolicyRequest, dict] = None, *, project: str = None, ssl_policy: str = None, ssl_policy_resource: compute.SslPolicy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -732,7 +744,7 @@ def patch( included in the request. Args: - request (google.cloud.compute_v1.types.PatchSslPolicyRequest): + request (Union[google.cloud.compute_v1.types.PatchSslPolicyRequest, dict]): The request object. A request message for SslPolicies.Patch. See the method description for details. @@ -814,6 +826,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/ssl_policies/pagers.py b/google/cloud/compute_v1/services/ssl_policies/pagers.py index e8c88b43b..12cb421da 100644 --- a/google/cloud/compute_v1/services/ssl_policies/pagers.py +++ b/google/cloud/compute_v1/services/ssl_policies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SslPoliciesList]: + def pages(self) -> Iterator[compute.SslPoliciesList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.SslPolicy]: + def __iter__(self) -> Iterator[compute.SslPolicy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/ssl_policies/transports/base.py b/google/cloud/compute_v1/services/ssl_policies/transports/base.py index bb586a325..1f9a02a9d 100644 --- a/google/cloud/compute_v1/services/ssl_policies/transports/base.py +++ b/google/cloud/compute_v1/services/ssl_policies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SslPoliciesTransport(abc.ABC): """Abstract transport class for SslPolicies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. 
if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -180,6 +144,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/ssl_policies/transports/rest.py b/google/cloud/compute_v1/services/ssl_policies/transports/rest.py index df57d208e..ec533fe3d 100644 --- a/google/cloud/compute_v1/services/ssl_policies/transports/rest.py +++ b/google/cloud/compute_v1/services/ssl_policies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import SslPoliciesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import SslPoliciesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class SslPoliciesRestTransport(SslPoliciesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteSslPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +136,9 @@ def delete( SslPolicies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
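The REST transport keeps sending its requests through an `AuthorizedSession`, which wraps a `requests` session with automatic credential refresh. A minimal sketch of that building block on its own; the URL and project are placeholders:

import google.auth
from google.auth.transport.requests import AuthorizedSession

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/cloud-platform"]
)
session = AuthorizedSession(credentials)
# The session injects and refreshes the Authorization header automatically.
response = session.get(
    "https://compute.googleapis.com/compute/v1/projects/my-project/global/sslPolicies"
)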
@@ -135,22 +162,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}".format( - host=self._host, project=request.project, ssl_policy=request.ssl_policy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("ssl_policy", "sslPolicy"), + ] + + request_kwargs = compute.DeleteSslPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteSslPolicyRequest.to_json( + compute.DeleteSslPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteSslPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +218,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetSslPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslPolicy: r"""Call the get method over HTTP. @@ -174,6 +234,9 @@ def get( SslPolicies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
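Instead of hard-coding `self._session.delete(...)`, the handlers now pick the HTTP verb returned by the transcoder via `getattr(self._session, method)`. The same idiom in isolation against a plain `requests` session; URL and timeout are placeholders:

import requests

session = requests.Session()
method = "delete"  # would come from transcoded_request["method"] in the transport
response = getattr(session, method)(
    "https://example.com/compute/v1/projects/my-project/global/sslPolicies/my-policy",
    timeout=30.0,
)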
@@ -188,20 +251,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}".format( - host=self._host, project=request.project, ssl_policy=request.ssl_policy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("ssl_policy", "sslPolicy"), + ] + + request_kwargs = compute.GetSslPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetSslPolicyRequest.to_json( + compute.GetSslPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -211,10 +307,12 @@ def get( # Return the response return compute.SslPolicy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertSslPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -225,6 +323,9 @@ def insert( SslPolicies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -248,30 +349,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/sslPolicies", + "body": "ssl_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertSslPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SslPolicy.to_json( - request.ssl_policy_resource, + compute.SslPolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslPolicies".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertSslPolicyRequest.to_json( + compute.InsertSslPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertSslPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -282,10 +412,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListSslPoliciesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslPoliciesList: r"""Call the list method over HTTP. @@ -296,6 +428,9 @@ def list( SslPolicies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
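Request bodies are serialized with the proto-plus `to_json` classmethod and responses parsed back with `from_json`, as the insert/patch handlers above do. A sketch of that round trip on its own; the field values are placeholders:

from google.cloud.compute_v1.types import compute

policy = compute.SslPolicy(name="my-policy", profile="MODERN")
body = compute.SslPolicy.to_json(
    policy, including_default_value_fields=False, use_integers_for_enums=False
)
round_tripped = compute.SslPolicy.from_json(body, ignore_unknown_fields=True)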
@@ -304,30 +439,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslPolicies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/sslPolicies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListSslPoliciesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListSslPoliciesRequest.to_json( + compute.ListSslPoliciesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListSslPoliciesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListSslPoliciesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListSslPoliciesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListSslPoliciesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListSslPoliciesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -339,10 +496,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_available_features( + def _list_available_features( self, request: compute.ListAvailableFeaturesSslPoliciesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SslPoliciesListAvailableFeaturesResponse: r"""Call the list available features method over HTTP. @@ -353,6 +512,9 @@ def list_available_features( SslPolicies.ListAvailableFeatures. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -361,33 +523,56 @@ def list_available_features( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslPolicies/listAvailableFeatures".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/sslPolicies/listAvailableFeatures", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListAvailableFeaturesSslPoliciesRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListAvailableFeaturesSslPoliciesRequest.to_json( + compute.ListAvailableFeaturesSslPoliciesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListAvailableFeaturesSslPoliciesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListAvailableFeaturesSslPoliciesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListAvailableFeaturesSslPoliciesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListAvailableFeaturesSslPoliciesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.ListAvailableFeaturesSslPoliciesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -399,10 +584,12 @@ def list_available_features( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchSslPolicyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -413,6 +600,9 @@ def patch( SslPolicies.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -436,30 +626,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}", + "body": "ssl_policy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("ssl_policy", "sslPolicy"), + ] + + request_kwargs = compute.PatchSslPolicyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SslPolicy.to_json( - request.ssl_policy_resource, + compute.SslPolicy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}".format( - host=self._host, project=request.project, ssl_policy=request.ssl_policy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchSslPolicyRequest.to_json( + compute.PatchSslPolicyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchSslPolicyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -470,5 +690,39 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete(self) -> Callable[[compute.DeleteSslPolicyRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetSslPolicyRequest], compute.SslPolicy]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertSslPolicyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListSslPoliciesRequest], compute.SslPoliciesList]: + return self._list + + @property + def list_available_features( + self, + ) -> Callable[ + [compute.ListAvailableFeaturesSslPoliciesRequest], + compute.SslPoliciesListAvailableFeaturesResponse, + ]: + return self._list_available_features + + @property + def patch(self) -> Callable[[compute.PatchSslPolicyRequest], compute.Operation]: + return self._patch + + def close(self): + self._session.close() + __all__ = ("SslPoliciesRestTransport",) diff 
--git a/google/cloud/compute_v1/services/subnetworks/client.py b/google/cloud/compute_v1/services/subnetworks/client.py index be2c05a13..abee5121c 100644 --- a/google/cloud/compute_v1/services/subnetworks/client.py +++ b/google/cloud/compute_v1/services/subnetworks/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.subnetworks import pagers from google.cloud.compute_v1.types import compute from .transports.base import SubnetworksTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListSubnetworksRequest = None, + request: Union[compute.AggregatedListSubnetworksRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of subnetworks. Args: - request (google.cloud.compute_v1.types.AggregatedListSubnetworksRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListSubnetworksRequest, dict]): The request object. A request message for Subnetworks.AggregatedList. See the method description for details. 
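The constructor change at the top of this hunk (repeated across the clients in this diff) replaces `distutils.util.strtobool` with a strict check that accepts only the literal strings "true" and "false". A standalone sketch of the same parsing:

import os

raw = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
if raw not in ("true", "false"):
    raise ValueError(
        "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
    )
use_client_cert = raw == "true"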
@@ -399,19 +411,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteSubnetworkRequest = None, + request: Union[compute.DeleteSubnetworkRequest, dict] = None, *, project: str = None, region: str = None, subnetwork: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified subnetwork. Args: - request (google.cloud.compute_v1.types.DeleteSubnetworkRequest): + request (Union[google.cloud.compute_v1.types.DeleteSubnetworkRequest, dict]): The request object. A request message for Subnetworks.Delete. See the method description for details. @@ -496,13 +508,13 @@ def delete( def expand_ip_cidr_range( self, - request: compute.ExpandIpCidrRangeSubnetworkRequest = None, + request: Union[compute.ExpandIpCidrRangeSubnetworkRequest, dict] = None, *, project: str = None, region: str = None, subnetwork: str = None, subnetworks_expand_ip_cidr_range_request_resource: compute.SubnetworksExpandIpCidrRangeRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -510,7 +522,7 @@ def expand_ip_cidr_range( specified value. Args: - request (google.cloud.compute_v1.types.ExpandIpCidrRangeSubnetworkRequest): + request (Union[google.cloud.compute_v1.types.ExpandIpCidrRangeSubnetworkRequest, dict]): The request object. A request message for Subnetworks.ExpandIpCidrRange. See the method description for details. @@ -611,12 +623,12 @@ def expand_ip_cidr_range( def get( self, - request: compute.GetSubnetworkRequest = None, + request: Union[compute.GetSubnetworkRequest, dict] = None, *, project: str = None, region: str = None, subnetwork: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Subnetwork: @@ -624,7 +636,7 @@ def get( available subnetworks list() request. Args: - request (google.cloud.compute_v1.types.GetSubnetworkRequest): + request (Union[google.cloud.compute_v1.types.GetSubnetworkRequest, dict]): The request object. A request message for Subnetworks.Get. See the method description for details. project (str): @@ -700,12 +712,12 @@ def get( def get_iam_policy( self, - request: compute.GetIamPolicySubnetworkRequest = None, + request: Union[compute.GetIamPolicySubnetworkRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -713,7 +725,7 @@ def get_iam_policy( empty if no such policy or resource exists. Args: - request (google.cloud.compute_v1.types.GetIamPolicySubnetworkRequest): + request (Union[google.cloud.compute_v1.types.GetIamPolicySubnetworkRequest, dict]): The request object. A request message for Subnetworks.GetIamPolicy. See the method description for details. 
@@ -822,12 +834,12 @@ def get_iam_policy( def insert( self, - request: compute.InsertSubnetworkRequest = None, + request: Union[compute.InsertSubnetworkRequest, dict] = None, *, project: str = None, region: str = None, subnetwork_resource: compute.Subnetwork = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -835,7 +847,7 @@ def insert( the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertSubnetworkRequest): + request (Union[google.cloud.compute_v1.types.InsertSubnetworkRequest, dict]): The request object. A request message for Subnetworks.Insert. See the method description for details. @@ -918,11 +930,11 @@ def insert( def list( self, - request: compute.ListSubnetworksRequest = None, + request: Union[compute.ListSubnetworksRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -930,7 +942,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListSubnetworksRequest): + request (Union[google.cloud.compute_v1.types.ListSubnetworksRequest, dict]): The request object. A request message for Subnetworks.List. See the method description for details. @@ -1002,10 +1014,10 @@ def list( def list_usable( self, - request: compute.ListUsableSubnetworksRequest = None, + request: Union[compute.ListUsableSubnetworksRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListUsablePager: @@ -1013,7 +1025,7 @@ def list_usable( subnetworks in the project. Args: - request (google.cloud.compute_v1.types.ListUsableSubnetworksRequest): + request (Union[google.cloud.compute_v1.types.ListUsableSubnetworksRequest, dict]): The request object. A request message for Subnetworks.ListUsable. See the method description for details. @@ -1074,13 +1086,13 @@ def list_usable( def patch( self, - request: compute.PatchSubnetworkRequest = None, + request: Union[compute.PatchSubnetworkRequest, dict] = None, *, project: str = None, region: str = None, subnetwork: str = None, subnetwork_resource: compute.Subnetwork = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1091,7 +1103,7 @@ def patch( of the subnetwork resource being patched. Args: - request (google.cloud.compute_v1.types.PatchSubnetworkRequest): + request (Union[google.cloud.compute_v1.types.PatchSubnetworkRequest, dict]): The request object. A request message for Subnetworks.Patch. See the method description for details. 
@@ -1183,13 +1195,13 @@ def patch( def set_iam_policy( self, - request: compute.SetIamPolicySubnetworkRequest = None, + request: Union[compute.SetIamPolicySubnetworkRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_policy_request_resource: compute.RegionSetPolicyRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: @@ -1197,7 +1209,7 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (google.cloud.compute_v1.types.SetIamPolicySubnetworkRequest): + request (Union[google.cloud.compute_v1.types.SetIamPolicySubnetworkRequest, dict]): The request object. A request message for Subnetworks.SetIamPolicy. See the method description for details. @@ -1317,13 +1329,13 @@ def set_iam_policy( def set_private_ip_google_access( self, - request: compute.SetPrivateIpGoogleAccessSubnetworkRequest = None, + request: Union[compute.SetPrivateIpGoogleAccessSubnetworkRequest, dict] = None, *, project: str = None, region: str = None, subnetwork: str = None, subnetworks_set_private_ip_google_access_request_resource: compute.SubnetworksSetPrivateIpGoogleAccessRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1332,7 +1344,7 @@ def set_private_ip_google_access( Private Google Access. Args: - request (google.cloud.compute_v1.types.SetPrivateIpGoogleAccessSubnetworkRequest): + request (Union[google.cloud.compute_v1.types.SetPrivateIpGoogleAccessSubnetworkRequest, dict]): The request object. A request message for Subnetworks.SetPrivateIpGoogleAccess. See the method description for details. @@ -1433,13 +1445,13 @@ def set_private_ip_google_access( def test_iam_permissions( self, - request: compute.TestIamPermissionsSubnetworkRequest = None, + request: Union[compute.TestIamPermissionsSubnetworkRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -1447,7 +1459,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsSubnetworkRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsSubnetworkRequest, dict]): The request object. A request message for Subnetworks.TestIamPermissions. See the method description for details. @@ -1526,6 +1538,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/subnetworks/pagers.py b/google/cloud/compute_v1/services/subnetworks/pagers.py index eea55ee21..1f4b5ad2e 100644 --- a/google/cloud/compute_v1/services/subnetworks/pagers.py +++ b/google/cloud/compute_v1/services/subnetworks/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SubnetworkAggregatedList]: + def pages(self) -> Iterator[compute.SubnetworkAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.SubnetworksScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.SubnetworksScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.SubnetworkList]: + def pages(self) -> Iterator[compute.SubnetworkList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Subnetwork]: + def __iter__(self) -> Iterator[compute.Subnetwork]: for page in self.pages: yield from page.items @@ -201,14 +201,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.UsableSubnetworksAggregatedList]: + def pages(self) -> Iterator[compute.UsableSubnetworksAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.UsableSubnetwork]: + def __iter__(self) -> Iterator[compute.UsableSubnetwork]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/subnetworks/transports/base.py b/google/cloud/compute_v1/services/subnetworks/transports/base.py index ff3498abe..bcb3eaafe 100644 --- a/google/cloud/compute_v1/services/subnetworks/transports/base.py +++ b/google/cloud/compute_v1/services/subnetworks/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: 
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class SubnetworksTransport(abc.ABC): """Abstract transport class for Subnetworks.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -202,6 +166,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/subnetworks/transports/rest.py b/google/cloud/compute_v1/services/subnetworks/transports/rest.py index 45ee9eb24..cf9a91616 100644 --- a/google/cloud/compute_v1/services/subnetworks/transports/rest.py +++ b/google/cloud/compute_v1/services/subnetworks/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import SubnetworksTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import SubnetworksTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class SubnetworksRestTransport(SubnetworksTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
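The rewritten REST handlers below all share one pattern: declare http_options and required_fields, run the request through google.api_core.path_template.transcode, then rebuild the query parameters from the transcoded request. A rough sketch of what transcode produces for the delete handler, with placeholder values (the handlers consume the uri, method, query_params and, where declared, body keys):

from google.api_core import path_template

http_options = [
    {
        "method": "delete",
        "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}",
    },
]

transcoded = path_template.transcode(
    http_options,
    project="my-project",
    region="us-central1",
    subnetwork="my-subnet",
    request_id="hypothetical-request-id",
)

# transcoded["method"]       -> "delete"
# transcoded["uri"]          -> "/compute/v1/projects/my-project/regions/us-central1/subnetworks/my-subnet"
# transcoded["query_params"] -> {"request_id": "hypothetical-request-id"}  (fields not bound in the URI)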
@@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListSubnetworksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SubnetworkAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( Subnetworks.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/subnetworks".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListSubnetworksRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListSubnetworksRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListSubnetworksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListSubnetworksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListSubnetworksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListSubnetworksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/subnetworks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListSubnetworksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListSubnetworksRequest.to_json( + compute.AggregatedListSubnetworksRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteSubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -171,6 +222,9 @@ def delete( Subnetworks.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -194,25 +248,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}".format( - host=self._host, - project=request.project, - region=request.region, - subnetwork=request.subnetwork, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("subnetwork", "subnetwork"), + ] + + request_kwargs = compute.DeleteSubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteSubnetworkRequest.to_json( + compute.DeleteSubnetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteSubnetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +305,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def expand_ip_cidr_range( + def _expand_ip_cidr_range( self, request: compute.ExpandIpCidrRangeSubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the expand ip cidr range method over HTTP. @@ -236,6 +321,9 @@ def expand_ip_cidr_range( Subnetworks.ExpandIpCidrRange. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -259,33 +347,63 @@ def expand_ip_cidr_range( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange", + "body": "subnetworks_expand_ip_cidr_range_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("subnetwork", "subnetwork"), + ] + + request_kwargs = compute.ExpandIpCidrRangeSubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SubnetworksExpandIpCidrRangeRequest.to_json( - request.subnetworks_expand_ip_cidr_range_request_resource, + compute.SubnetworksExpandIpCidrRangeRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange".format( - host=self._host, - project=request.project, - region=request.region, - subnetwork=request.subnetwork, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ExpandIpCidrRangeSubnetworkRequest.to_json( + compute.ExpandIpCidrRangeSubnetworkRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ExpandIpCidrRangeSubnetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -296,10 +414,12 @@ def expand_ip_cidr_range( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetSubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Subnetwork: r"""Call the get method over HTTP. @@ -310,6 +430,9 @@ def get( Subnetworks.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -325,23 +448,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}".format( - host=self._host, - project=request.project, - region=request.region, - subnetwork=request.subnetwork, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("subnetwork", "subnetwork"), + ] + + request_kwargs = compute.GetSubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetSubnetworkRequest.to_json( + compute.GetSubnetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -353,10 +507,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_iam_policy( + def _get_iam_policy( self, request: compute.GetIamPolicySubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the get iam policy method over HTTP. @@ -367,6 +523,9 @@ def get_iam_policy( Subnetworks.GetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -415,30 +574,56 @@ def get_iam_policy( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/getIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if ( - compute.GetIamPolicySubnetworkRequest.options_requested_policy_version - in request - ): - query_params[ - "optionsRequestedPolicyVersion" - ] = request.options_requested_policy_version + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/getIamPolicy", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.GetIamPolicySubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetIamPolicySubnetworkRequest.to_json( + compute.GetIamPolicySubnetworkRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -448,10 +633,12 @@ def get_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertSubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -462,6 +649,9 @@ def insert( Subnetworks.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -485,30 +675,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks", + "body": "subnetwork_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertSubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Subnetwork.to_json( - request.subnetwork_resource, + compute.Subnetwork(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertSubnetworkRequest.to_json( + compute.InsertSubnetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertSubnetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -519,10 +739,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListSubnetworksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.SubnetworkList: r"""Call the list method over HTTP. @@ -533,6 +755,9 @@ def list( Subnetworks.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -543,30 +768,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks".format( - host=self._host, project=request.project, region=request.region, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListSubnetworksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListSubnetworksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListSubnetworksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListSubnetworksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListSubnetworksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListSubnetworksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListSubnetworksRequest.to_json( + compute.ListSubnetworksRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -578,10 +826,12 @@ def list( response.content, ignore_unknown_fields=True ) - def list_usable( + def _list_usable( self, request: compute.ListUsableSubnetworksRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UsableSubnetworksAggregatedList: r"""Call the list usable method over HTTP. @@ -592,6 +842,9 @@ def list_usable( Subnetworks.ListUsable. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -600,30 +853,54 @@ def list_usable( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/subnetworks/listUsable".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListUsableSubnetworksRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListUsableSubnetworksRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListUsableSubnetworksRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListUsableSubnetworksRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListUsableSubnetworksRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/subnetworks/listUsable", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListUsableSubnetworksRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListUsableSubnetworksRequest.to_json( + compute.ListUsableSubnetworksRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -635,10 +912,12 @@ def list_usable( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchSubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -649,6 +928,9 @@ def patch( Subnetworks.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -672,35 +954,61 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}", + "body": "subnetwork_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("subnetwork", "subnetwork"), + ] + + request_kwargs = compute.PatchSubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.Subnetwork.to_json( - request.subnetwork_resource, + compute.Subnetwork(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}".format( - host=self._host, - project=request.project, - region=request.region, - subnetwork=request.subnetwork, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchSubnetworkRequest.to_json( + compute.PatchSubnetworkRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchSubnetworkRequest.drain_timeout_seconds in request: - query_params["drainTimeoutSeconds"] = request.drain_timeout_seconds - if compute.PatchSubnetworkRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -711,10 +1019,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_iam_policy( + def _set_iam_policy( self, request: compute.SetIamPolicySubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Policy: r"""Call the set iam policy method over HTTP. @@ -725,6 +1035,9 @@ def set_iam_policy( Subnetworks.SetIamPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -773,31 +1086,63 @@ def set_iam_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetIamPolicySubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetPolicyRequest.to_json( - request.region_set_policy_request_resource, + compute.RegionSetPolicyRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/setIamPolicy".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetIamPolicySubnetworkRequest.to_json( + compute.SetIamPolicySubnetworkRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -808,10 +1153,12 @@ def set_iam_policy( # Return the response return compute.Policy.from_json(response.content, ignore_unknown_fields=True) - def set_private_ip_google_access( + def _set_private_ip_google_access( self, request: compute.SetPrivateIpGoogleAccessSubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set private ip google @@ -823,6 +1170,9 @@ def set_private_ip_google_access( Subnetworks.SetPrivateIpGoogleAccess. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -846,33 +1196,67 @@ def set_private_ip_google_access( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess", + "body": "subnetworks_set_private_ip_google_access_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("subnetwork", "subnetwork"), + ] + + request_kwargs = compute.SetPrivateIpGoogleAccessSubnetworkRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SubnetworksSetPrivateIpGoogleAccessRequest.to_json( - request.subnetworks_set_private_ip_google_access_request_resource, + compute.SubnetworksSetPrivateIpGoogleAccessRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess".format( - host=self._host, - project=request.project, - region=request.region, - subnetwork=request.subnetwork, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetPrivateIpGoogleAccessSubnetworkRequest.to_json( + compute.SetPrivateIpGoogleAccessSubnetworkRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetPrivateIpGoogleAccessSubnetworkRequest.request_id in request: - 
query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -883,10 +1267,12 @@ def set_private_ip_google_access( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsSubnetworkRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -897,6 +1283,9 @@ def test_iam_permissions( Subnetworks.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
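Together with the transport-level close() added in base.py and the __enter__/__exit__ methods added to the client, these changes make the generated clients usable as context managers. A minimal sketch of that pattern, using the same placeholder project and region as above (avoid it if the transport is shared with other clients, since exiting the block closes the transport):

from google.cloud import compute_v1

with compute_v1.SubnetworksClient() as client:
    for subnet in client.list(
        request={"project": "my-project", "region": "us-central1"}
    ):
        print(subnet.name)
# The underlying transport is closed when the block exits.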
@@ -905,31 +1294,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsSubnetworkRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsSubnetworkRequest.to_json( + compute.TestIamPermissionsSubnetworkRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -942,5 +1363,80 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListSubnetworksRequest], compute.SubnetworkAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteSubnetworkRequest], compute.Operation]: + return self._delete + + @property + def expand_ip_cidr_range( + self, + ) -> Callable[[compute.ExpandIpCidrRangeSubnetworkRequest], compute.Operation]: + return self._expand_ip_cidr_range + + @property + def get(self) -> Callable[[compute.GetSubnetworkRequest], compute.Subnetwork]: + return self._get + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicySubnetworkRequest], compute.Policy]: + return self._get_iam_policy + + @property + def insert(self) -> Callable[[compute.InsertSubnetworkRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListSubnetworksRequest], compute.SubnetworkList]: + return self._list + + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableSubnetworksRequest], compute.UsableSubnetworksAggregatedList + ]: + return self._list_usable + + @property + def patch(self) -> Callable[[compute.PatchSubnetworkRequest], compute.Operation]: + return self._patch + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicySubnetworkRequest], compute.Policy]: + return self._set_iam_policy + + @property + def set_private_ip_google_access( + self, + ) -> Callable[ + [compute.SetPrivateIpGoogleAccessSubnetworkRequest], compute.Operation + ]: + return self._set_private_ip_google_access + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsSubnetworkRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("SubnetworksRestTransport",) diff --git a/google/cloud/compute_v1/services/target_grpc_proxies/client.py b/google/cloud/compute_v1/services/target_grpc_proxies/client.py index 5e29f91b1..3771d01bf 100644 --- a/google/cloud/compute_v1/services/target_grpc_proxies/client.py +++ b/google/cloud/compute_v1/services/target_grpc_proxies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_grpc_proxies import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetGrpcProxiesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,15 +339,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteTargetGrpcProxyRequest = None, + request: Union[compute.DeleteTargetGrpcProxyRequest, dict] = None, *, project: str = None, target_grpc_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -344,7 +356,7 @@ def delete( scope Args: - request (google.cloud.compute_v1.types.DeleteTargetGrpcProxyRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetGrpcProxyRequest, dict]): The request object. A request message for TargetGrpcProxies.Delete. See the method description for details. @@ -420,11 +432,11 @@ def delete( def get( self, - request: compute.GetTargetGrpcProxyRequest = None, + request: Union[compute.GetTargetGrpcProxyRequest, dict] = None, *, project: str = None, target_grpc_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetGrpcProxy: @@ -432,7 +444,7 @@ def get( given scope. Args: - request (google.cloud.compute_v1.types.GetTargetGrpcProxyRequest): + request (Union[google.cloud.compute_v1.types.GetTargetGrpcProxyRequest, dict]): The request object. 
A request message for TargetGrpcProxies.Get. See the method description for details. @@ -500,11 +512,11 @@ def get( def insert( self, - request: compute.InsertTargetGrpcProxyRequest = None, + request: Union[compute.InsertTargetGrpcProxyRequest, dict] = None, *, project: str = None, target_grpc_proxy_resource: compute.TargetGrpcProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -513,7 +525,7 @@ def insert( in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetGrpcProxyRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetGrpcProxyRequest, dict]): The request object. A request message for TargetGrpcProxies.Insert. See the method description for details. @@ -587,10 +599,10 @@ def insert( def list( self, - request: compute.ListTargetGrpcProxiesRequest = None, + request: Union[compute.ListTargetGrpcProxiesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -598,7 +610,7 @@ def list( given scope. Args: - request (google.cloud.compute_v1.types.ListTargetGrpcProxiesRequest): + request (Union[google.cloud.compute_v1.types.ListTargetGrpcProxiesRequest, dict]): The request object. A request message for TargetGrpcProxies.List. See the method description for details. @@ -659,12 +671,12 @@ def list( def patch( self, - request: compute.PatchTargetGrpcProxyRequest = None, + request: Union[compute.PatchTargetGrpcProxyRequest, dict] = None, *, project: str = None, target_grpc_proxy: str = None, target_grpc_proxy_resource: compute.TargetGrpcProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -674,7 +686,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchTargetGrpcProxyRequest): + request (Union[google.cloud.compute_v1.types.PatchTargetGrpcProxyRequest, dict]): The request object. A request message for TargetGrpcProxies.Patch. See the method description for details. @@ -757,6 +769,19 @@ def patch( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_grpc_proxies/pagers.py b/google/cloud/compute_v1/services/target_grpc_proxies/pagers.py index 1f330c80e..51d895920 100644 --- a/google/cloud/compute_v1/services/target_grpc_proxies/pagers.py +++ b/google/cloud/compute_v1/services/target_grpc_proxies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetGrpcProxyList]: + def pages(self) -> Iterator[compute.TargetGrpcProxyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetGrpcProxy]: + def __iter__(self) -> Iterator[compute.TargetGrpcProxy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_grpc_proxies/transports/base.py b/google/cloud/compute_v1/services/target_grpc_proxies/transports/base.py index 575168607..125081cd4 100644 --- a/google/cloud/compute_v1/services/target_grpc_proxies/transports/base.py +++ b/google/cloud/compute_v1/services/target_grpc_proxies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class TargetGrpcProxiesTransport(abc.ABC): """Abstract transport class for TargetGrpcProxies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. 
+ # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py b/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py index a494ff4b1..427068b37 100644 --- a/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py +++ b/google/cloud/compute_v1/services/target_grpc_proxies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
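Aside (not part of the diff): the reworked modules introduce an `OptionalRetry` alias so retry annotations work whether or not the installed `google-api-core` exposes `gapic_v1.method._MethodDefault` (older releases do not). The pattern, isolated into a runnable sketch with a hypothetical `call` signature:

```python
from typing import Union

from google.api_core import gapic_v1
from google.api_core import retry as retries

try:
    # Newer google-api-core: the DEFAULT sentinel has a dedicated type.
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    # Older google-api-core: fall back to a plain object sentinel.
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


def call(retry: OptionalRetry = gapic_v1.method.DEFAULT) -> None:
    """A method can now accept a Retry, the DEFAULT sentinel, or None uniformly."""
```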
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + TargetGrpcProxiesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import TargetGrpcProxiesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetGrpcProxiesRestTransport(TargetGrpcProxiesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteTargetGrpcProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( TargetGrpcProxies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,24 +165,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}".format( - host=self._host, - project=request.project, - target_grpc_proxy=request.target_grpc_proxy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_grpc_proxy", "targetGrpcProxy"), + ] + + request_kwargs = compute.DeleteTargetGrpcProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetGrpcProxyRequest.to_json( + compute.DeleteTargetGrpcProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetGrpcProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -162,10 +223,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetGrpcProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetGrpcProxy: r"""Call the get method over HTTP. @@ -176,6 +239,9 @@ def get( TargetGrpcProxies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -191,22 +257,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}".format( - host=self._host, - project=request.project, - target_grpc_proxy=request.target_grpc_proxy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_grpc_proxy", "targetGrpcProxy"), + ] + + request_kwargs = compute.GetTargetGrpcProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetGrpcProxyRequest.to_json( + compute.GetTargetGrpcProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -218,10 +315,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetGrpcProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -232,6 +331,9 @@ def insert( TargetGrpcProxies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -255,30 +357,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetGrpcProxies", + "body": "target_grpc_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertTargetGrpcProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetGrpcProxy.to_json( - request.target_grpc_proxy_resource, + compute.TargetGrpcProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetGrpcProxies".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetGrpcProxyRequest.to_json( + compute.InsertTargetGrpcProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetGrpcProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -289,10 +422,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetGrpcProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetGrpcProxyList: r"""Call the list method over HTTP. @@ -303,6 +438,9 @@ def list( TargetGrpcProxies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -311,30 +449,54 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetGrpcProxies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetGrpcProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListTargetGrpcProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetGrpcProxiesRequest.to_json( + compute.ListTargetGrpcProxiesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetGrpcProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetGrpcProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetGrpcProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetGrpcProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetGrpcProxiesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -346,10 +508,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchTargetGrpcProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -360,6 +524,9 @@ def patch( TargetGrpcProxies.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -383,32 +550,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}", + "body": "target_grpc_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_grpc_proxy", "targetGrpcProxy"), + ] + + request_kwargs = compute.PatchTargetGrpcProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetGrpcProxy.to_json( - request.target_grpc_proxy_resource, + compute.TargetGrpcProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}".format( - host=self._host, - project=request.project, - target_grpc_proxy=request.target_grpc_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchTargetGrpcProxyRequest.to_json( + compute.PatchTargetGrpcProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchTargetGrpcProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -419,5 +614,38 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteTargetGrpcProxyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetTargetGrpcProxyRequest], compute.TargetGrpcProxy]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertTargetGrpcProxyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListTargetGrpcProxiesRequest], compute.TargetGrpcProxyList]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchTargetGrpcProxyRequest], compute.Operation]: + return self._patch + + def close(self): + self._session.close() + __all__ = ("TargetGrpcProxiesRestTransport",) diff --git a/google/cloud/compute_v1/services/target_http_proxies/client.py b/google/cloud/compute_v1/services/target_http_proxies/client.py index d2ac7cbf0..57b0701dc 100644 --- a/google/cloud/compute_v1/services/target_http_proxies/client.py +++ b/google/cloud/compute_v1/services/target_http_proxies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
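Aside (not part of the diff): each rewritten REST method now declares its `http_options`, runs the request dict through `google.api_core.path_template.transcode` to split it into URI, HTTP verb, optional body, and query params, and then backfills required fields that the proto `to_json` round trip would drop because they still hold their default value. A simplified, framework-free sketch of that flow; the `max_results` requirement is purely illustrative (the generator's real `required_fields` lists differ) and a recent `google-api-core` is assumed:

```python
from google.api_core import path_template

# Illustrative option: {project} is a path parameter; leftovers become query params.
http_options = [
    {"method": "get", "uri": "/compute/v1/projects/{project}/global/targetGrpcProxies"},
]

# (snake_case_name, camelCaseName) pairs that must reach the server even at defaults.
required_fields = [("max_results", "maxResults")]

# What compute.<Request>.to_dict(request) might yield for a request using defaults.
request_kwargs = {"project": "my-project", "max_results": 0}

transcoded = path_template.transcode(http_options, **request_kwargs)
uri, method = transcoded["uri"], transcoded["method"]  # path filled in, verb "get"

# The generated code round-trips query params through proto to_json/json.loads,
# which drops fields still at their default; emulate that by dropping falsy values.
query_params = {k: v for k, v in transcoded["query_params"].items() if v}

# Backfill: required fields are restored from the transcoded request.
for snake_name, camel_name in required_fields:
    if snake_name in transcoded["query_params"] and camel_name not in query_params:
        query_params[camel_name] = transcoded["query_params"][snake_name]

assert query_params == {"maxResults": 0}
```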
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_http_proxies import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetHttpProxiesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,14 +339,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListTargetHttpProxiesRequest = None, + request: Union[compute.AggregatedListTargetHttpProxiesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -343,7 +355,7 @@ def aggregated_list( regional and global, available to the specified project. Args: - request (google.cloud.compute_v1.types.AggregatedListTargetHttpProxiesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListTargetHttpProxiesRequest, dict]): The request object. A request message for TargetHttpProxies.AggregatedList. See the method description for details. @@ -406,18 +418,18 @@ def aggregated_list( def delete( self, - request: compute.DeleteTargetHttpProxyRequest = None, + request: Union[compute.DeleteTargetHttpProxyRequest, dict] = None, *, project: str = None, target_http_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified TargetHttpProxy resource. 
Args: - request (google.cloud.compute_v1.types.DeleteTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetHttpProxyRequest, dict]): The request object. A request message for TargetHttpProxies.Delete. See the method description for details. @@ -493,11 +505,11 @@ def delete( def get( self, - request: compute.GetTargetHttpProxyRequest = None, + request: Union[compute.GetTargetHttpProxyRequest, dict] = None, *, project: str = None, target_http_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpProxy: @@ -506,7 +518,7 @@ def get( list() request. Args: - request (google.cloud.compute_v1.types.GetTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.GetTargetHttpProxyRequest, dict]): The request object. A request message for TargetHttpProxies.Get. See the method description for details. @@ -580,11 +592,11 @@ def get( def insert( self, - request: compute.InsertTargetHttpProxyRequest = None, + request: Union[compute.InsertTargetHttpProxyRequest, dict] = None, *, project: str = None, target_http_proxy_resource: compute.TargetHttpProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -592,7 +604,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetHttpProxyRequest, dict]): The request object. A request message for TargetHttpProxies.Insert. See the method description for details. @@ -666,10 +678,10 @@ def insert( def list( self, - request: compute.ListTargetHttpProxiesRequest = None, + request: Union[compute.ListTargetHttpProxiesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -677,7 +689,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListTargetHttpProxiesRequest): + request (Union[google.cloud.compute_v1.types.ListTargetHttpProxiesRequest, dict]): The request object. A request message for TargetHttpProxies.List. See the method description for details. @@ -739,12 +751,12 @@ def list( def patch( self, - request: compute.PatchTargetHttpProxyRequest = None, + request: Union[compute.PatchTargetHttpProxyRequest, dict] = None, *, project: str = None, target_http_proxy: str = None, target_http_proxy_resource: compute.TargetHttpProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -754,7 +766,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.PatchTargetHttpProxyRequest, dict]): The request object. A request message for TargetHttpProxies.Patch. See the method description for details. 
@@ -839,19 +851,19 @@ def patch( def set_url_map( self, - request: compute.SetUrlMapTargetHttpProxyRequest = None, + request: Union[compute.SetUrlMapTargetHttpProxyRequest, dict] = None, *, project: str = None, target_http_proxy: str = None, url_map_reference_resource: compute.UrlMapReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the URL map for TargetHttpProxy. Args: - request (google.cloud.compute_v1.types.SetUrlMapTargetHttpProxyRequest): + request (Union[google.cloud.compute_v1.types.SetUrlMapTargetHttpProxyRequest, dict]): The request object. A request message for TargetHttpProxies.SetUrlMap. See the method description for details. @@ -934,6 +946,19 @@ def set_url_map( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_http_proxies/pagers.py b/google/cloud/compute_v1/services/target_http_proxies/pagers.py index d8a8c0afc..8ebd1447d 100644 --- a/google/cloud/compute_v1/services/target_http_proxies/pagers.py +++ b/google/cloud/compute_v1/services/target_http_proxies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetHttpProxyAggregatedList]: + def pages(self) -> Iterator[compute.TargetHttpProxyAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.TargetHttpProxiesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.TargetHttpProxiesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetHttpProxyList]: + def pages(self) -> Iterator[compute.TargetHttpProxyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetHttpProxy]: + def __iter__(self) -> Iterator[compute.TargetHttpProxy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_http_proxies/transports/base.py b/google/cloud/compute_v1/services/target_http_proxies/transports/base.py index efb28747b..fbd9bab50 100644 --- a/google/cloud/compute_v1/services/target_http_proxies/transports/base.py +++ b/google/cloud/compute_v1/services/target_http_proxies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, 
Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class TargetHttpProxiesTransport(abc.ABC): """Abstract transport class for TargetHttpProxies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -181,6 +145,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
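Aside (not part of the diff): the pager changes above only tighten annotations, since `pages` and `__iter__` are generator functions and therefore return iterators, not just iterables. A stripped-down stand-in showing the same lazy page-walking shape, with hypothetical `FakePage`/`list_page` names in place of the generated response types and API method:

```python
from typing import Iterator, List


class FakePage:
    """Stand-in for a *List response message."""

    def __init__(self, items: List[str], next_page_token: str = "") -> None:
        self.items = items
        self.next_page_token = next_page_token


def list_page(page_token: str = "") -> FakePage:
    """Pretend API method returning two pages of results."""
    if page_token == "":
        return FakePage(["a", "b"], next_page_token="page-2")
    return FakePage(["c"])


class ListPagerSketch:
    """Mirrors the generated pager: lazily fetches pages, flattens their items."""

    def __init__(self) -> None:
        self._response = list_page()

    @property
    def pages(self) -> Iterator[FakePage]:
        yield self._response
        while self._response.next_page_token:
            self._response = list_page(self._response.next_page_token)
            yield self._response

    def __iter__(self) -> Iterator[str]:
        for page in self.pages:
            yield from page.items


assert list(ListPagerSketch()) == ["a", "b", "c"]
```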
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py b/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py index 5e6fb576f..07f329ea9 100644 --- a/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py +++ b/google/cloud/compute_v1/services/target_http_proxies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + TargetHttpProxiesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import TargetHttpProxiesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetHttpProxiesRestTransport(TargetHttpProxiesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListTargetHttpProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpProxyAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( TargetHttpProxies.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,35 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/targetHttpProxies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/targetHttpProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListTargetHttpProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListTargetHttpProxiesRequest.to_json( + compute.AggregatedListTargetHttpProxiesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListTargetHttpProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListTargetHttpProxiesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListTargetHttpProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListTargetHttpProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListTargetHttpProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListTargetHttpProxiesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( TargetHttpProxies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,24 +251,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}".format( - host=self._host, - project=request.project, - target_http_proxy=request.target_http_proxy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_http_proxy", "targetHttpProxy"), + ] + + request_kwargs = compute.DeleteTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetHttpProxyRequest.to_json( + compute.DeleteTargetHttpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetHttpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -224,10 +309,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpProxy: r"""Call the get method over HTTP. @@ -238,6 +325,9 @@ def get( TargetHttpProxies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -259,22 +349,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}".format( - host=self._host, - project=request.project, - target_http_proxy=request.target_http_proxy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_http_proxy", "targetHttpProxy"), + ] + + request_kwargs = compute.GetTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetHttpProxyRequest.to_json( + compute.GetTargetHttpProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -286,10 +407,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -300,6 +423,9 @@ def insert( TargetHttpProxies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -323,30 +449,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetHttpProxies", + "body": "target_http_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpProxy.to_json( - request.target_http_proxy_resource, + compute.TargetHttpProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpProxies".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetHttpProxyRequest.to_json( + compute.InsertTargetHttpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetHttpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -357,10 +514,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetHttpProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpProxyList: r"""Call the list method over HTTP. @@ -371,6 +530,9 @@ def list( TargetHttpProxies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -379,30 +541,54 @@ def list( A list of TargetHttpProxy resources. """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpProxies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetHttpProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListTargetHttpProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetHttpProxiesRequest.to_json( + compute.ListTargetHttpProxiesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetHttpProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetHttpProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetHttpProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetHttpProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetHttpProxiesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
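The "required fields" back-fill loop above exists because serializing with including_default_value_fields=False drops any field still at its proto default. A hedged illustration using one of the request types from this change (the project name is a placeholder; an empty target_http_proxy stands in for a field left at its default):

from google.cloud.compute_v1.types import compute

request = compute.GetTargetHttpProxyRequest(
    project="example-project", target_http_proxy=""
)
print(
    compute.GetTargetHttpProxyRequest.to_json(
        request,
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
)
# Only "project" survives; "targetHttpProxy" is dropped because it sits at its
# default value, which is why the loop re-checks the transcoded query params
# for every required field.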
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -414,10 +600,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -428,6 +616,9 @@ def patch( TargetHttpProxies.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -451,32 +642,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}", + "body": "target_http_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_http_proxy", "targetHttpProxy"), + ] + + request_kwargs = compute.PatchTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpProxy.to_json( - request.target_http_proxy_resource, + compute.TargetHttpProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}".format( - host=self._host, - project=request.project, - target_http_proxy=request.target_http_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchTargetHttpProxyRequest.to_json( + compute.PatchTargetHttpProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchTargetHttpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
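The query dict built above is passed through rest_helpers.flatten_query_params before it reaches requests. The real helper lives in google.api_core.rest_helpers; the stand-in below is purely illustrative, and its output shape (dotted keys for nested dicts, repeated keys for lists) is an assumption for the sake of the example:

def sketch_flatten_query_params(params, prefix=""):
    """Toy flattener: nested dicts become dotted keys, lists become repeated keys."""
    pairs = []
    for key, value in params.items():
        name = f"{prefix}.{key}" if prefix else key
        if isinstance(value, dict):
            pairs.extend(sketch_flatten_query_params(value, prefix=name))
        elif isinstance(value, (list, tuple)):
            pairs.extend((name, item) for item in value)
        else:
            pairs.append((name, value))
    return pairs

print(sketch_flatten_query_params(
    {"filter": "name=web-*", "pageToken": "abc", "opts": {"x": 1}}
))
# [('filter', 'name=web-*'), ('pageToken', 'abc'), ('opts.x', 1)]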
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -487,10 +706,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_url_map( + def _set_url_map( self, request: compute.SetUrlMapTargetHttpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set url map method over HTTP. @@ -501,6 +722,9 @@ def set_url_map( TargetHttpProxies.SetUrlMap. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -524,32 +748,62 @@ def set_url_map( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap", + "body": "url_map_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_http_proxy", "targetHttpProxy"), + ] + + request_kwargs = compute.SetUrlMapTargetHttpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMapReference.to_json( - request.url_map_reference_resource, + compute.UrlMapReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap".format( - host=self._host, - project=request.project, - target_http_proxy=request.target_http_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetUrlMapTargetHttpProxyRequest.to_json( + compute.SetUrlMapTargetHttpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetUrlMapTargetHttpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -560,5 +814,53 @@ def set_url_map( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListTargetHttpProxiesRequest], + compute.TargetHttpProxyAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteTargetHttpProxyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetTargetHttpProxyRequest], compute.TargetHttpProxy]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertTargetHttpProxyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListTargetHttpProxiesRequest], compute.TargetHttpProxyList]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchTargetHttpProxyRequest], compute.Operation]: + return self._patch + + @property + def set_url_map( + self, + ) -> Callable[[compute.SetUrlMapTargetHttpProxyRequest], compute.Operation]: + return self._set_url_map + + def close(self): + self._session.close() + __all__ = ("TargetHttpProxiesRestTransport",) diff --git a/google/cloud/compute_v1/services/target_https_proxies/client.py b/google/cloud/compute_v1/services/target_https_proxies/client.py index 1dedef100..42fcde515 100644 --- a/google/cloud/compute_v1/services/target_https_proxies/client.py +++ b/google/cloud/compute_v1/services/target_https_proxies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
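The renamed _get/_insert/_list methods in the transport above are re-exposed through read-only properties, so transport.get(request) keeps working while each RPC is now addressable as a plain callable. A minimal, self-contained sketch of that pattern outside the generated code:

from typing import Callable


class SketchTransport:
    def _get(self, request: dict) -> dict:
        # Stand-in for the HTTP call; echoes the request for illustration.
        return {"handled": request}

    @property
    def get(self) -> Callable[[dict], dict]:
        # Callers still write transport.get(request), but the callable itself
        # can now be wrapped (e.g. with retry/timeout logic) without
        # monkey-patching the transport instance.
        return self._get


transport = SketchTransport()
print(transport.get({"project": "example-project"}))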
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_https_proxies import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,14 +339,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListTargetHttpsProxiesRequest = None, + request: Union[compute.AggregatedListTargetHttpsProxiesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -343,7 +355,7 @@ def aggregated_list( regional and global, available to the specified project. Args: - request (google.cloud.compute_v1.types.AggregatedListTargetHttpsProxiesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListTargetHttpsProxiesRequest, dict]): The request object. A request message for TargetHttpsProxies.AggregatedList. See the method description for details. @@ -406,18 +418,18 @@ def aggregated_list( def delete( self, - request: compute.DeleteTargetHttpsProxyRequest = None, + request: Union[compute.DeleteTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified TargetHttpsProxy resource. 
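A hedged sketch of the GOOGLE_API_USE_CLIENT_CERTIFICATE handling introduced above, which replaces distutils.util.strtobool with an explicit check that accepts only the literal strings "true" and "false":

import os


def sketch_use_client_cert() -> bool:
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
            "must be either `true` or `false`"
        )
    return value == "true"


print(sketch_use_client_cert())  # False unless the variable is set to "true"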
Args: - request (google.cloud.compute_v1.types.DeleteTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetHttpsProxyRequest, dict]): The request object. A request message for TargetHttpsProxies.Delete. See the method description for details. @@ -493,11 +505,11 @@ def delete( def get( self, - request: compute.GetTargetHttpsProxyRequest = None, + request: Union[compute.GetTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpsProxy: @@ -506,7 +518,7 @@ def get( list() request. Args: - request (google.cloud.compute_v1.types.GetTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.GetTargetHttpsProxyRequest, dict]): The request object. A request message for TargetHttpsProxies.Get. See the method description for details. @@ -579,11 +591,11 @@ def get( def insert( self, - request: compute.InsertTargetHttpsProxyRequest = None, + request: Union[compute.InsertTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy_resource: compute.TargetHttpsProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -591,7 +603,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetHttpsProxyRequest, dict]): The request object. A request message for TargetHttpsProxies.Insert. See the method description for details. @@ -665,10 +677,10 @@ def insert( def list( self, - request: compute.ListTargetHttpsProxiesRequest = None, + request: Union[compute.ListTargetHttpsProxiesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -676,7 +688,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListTargetHttpsProxiesRequest): + request (Union[google.cloud.compute_v1.types.ListTargetHttpsProxiesRequest, dict]): The request object. A request message for TargetHttpsProxies.List. See the method description for details. @@ -739,12 +751,12 @@ def list( def patch( self, - request: compute.PatchTargetHttpsProxyRequest = None, + request: Union[compute.PatchTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy: str = None, target_https_proxy_resource: compute.TargetHttpsProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -754,7 +766,7 @@ def patch( processing rules. Args: - request (google.cloud.compute_v1.types.PatchTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.PatchTargetHttpsProxyRequest, dict]): The request object. A request message for TargetHttpsProxies.Patch. See the method description for details. 
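With the signatures above widened to Union[..., dict], a plain dict can now be passed wherever a request message was previously required. A hedged usage sketch (all names are placeholders, and Application Default Credentials are assumed to be configured):

from google.cloud import compute_v1

client = compute_v1.TargetHttpsProxiesClient()

# The two calls below are equivalent; the dict is coerced into the request message.
proxy = client.get(
    request={"project": "example-project", "target_https_proxy": "example-proxy"}
)
proxy = client.get(
    request=compute_v1.GetTargetHttpsProxyRequest(
        project="example-project", target_https_proxy="example-proxy"
    )
)
print(proxy.name)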
@@ -839,19 +851,19 @@ def patch( def set_quic_override( self, - request: compute.SetQuicOverrideTargetHttpsProxyRequest = None, + request: Union[compute.SetQuicOverrideTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy: str = None, target_https_proxies_set_quic_override_request_resource: compute.TargetHttpsProxiesSetQuicOverrideRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Sets the QUIC override policy for TargetHttpsProxy. Args: - request (google.cloud.compute_v1.types.SetQuicOverrideTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.SetQuicOverrideTargetHttpsProxyRequest, dict]): The request object. A request message for TargetHttpsProxies.SetQuicOverride. See the method description for details. @@ -943,19 +955,19 @@ def set_quic_override( def set_ssl_certificates( self, - request: compute.SetSslCertificatesTargetHttpsProxyRequest = None, + request: Union[compute.SetSslCertificatesTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy: str = None, target_https_proxies_set_ssl_certificates_request_resource: compute.TargetHttpsProxiesSetSslCertificatesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Replaces SslCertificates for TargetHttpsProxy. Args: - request (google.cloud.compute_v1.types.SetSslCertificatesTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.SetSslCertificatesTargetHttpsProxyRequest, dict]): The request object. A request message for TargetHttpsProxies.SetSslCertificates. See the method description for details. @@ -1046,12 +1058,12 @@ def set_ssl_certificates( def set_ssl_policy( self, - request: compute.SetSslPolicyTargetHttpsProxyRequest = None, + request: Union[compute.SetSslPolicyTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy: str = None, ssl_policy_reference_resource: compute.SslPolicyReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -1062,7 +1074,7 @@ def set_ssl_policy( connection between the load balancer and the backends. Args: - request (google.cloud.compute_v1.types.SetSslPolicyTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.SetSslPolicyTargetHttpsProxyRequest, dict]): The request object. A request message for TargetHttpsProxies.SetSslPolicy. See the method description for details. @@ -1149,19 +1161,19 @@ def set_ssl_policy( def set_url_map( self, - request: compute.SetUrlMapTargetHttpsProxyRequest = None, + request: Union[compute.SetUrlMapTargetHttpsProxyRequest, dict] = None, *, project: str = None, target_https_proxy: str = None, url_map_reference_resource: compute.UrlMapReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the URL map for TargetHttpsProxy. Args: - request (google.cloud.compute_v1.types.SetUrlMapTargetHttpsProxyRequest): + request (Union[google.cloud.compute_v1.types.SetUrlMapTargetHttpsProxyRequest, dict]): The request object. 
A request message for TargetHttpsProxies.SetUrlMap. See the method description for details. @@ -1244,6 +1256,19 @@ def set_url_map( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_https_proxies/pagers.py b/google/cloud/compute_v1/services/target_https_proxies/pagers.py index a1aff1e2f..ed1f70a39 100644 --- a/google/cloud/compute_v1/services/target_https_proxies/pagers.py +++ b/google/cloud/compute_v1/services/target_https_proxies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetHttpsProxyAggregatedList]: + def pages(self) -> Iterator[compute.TargetHttpsProxyAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.TargetHttpsProxiesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.TargetHttpsProxiesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetHttpsProxyList]: + def pages(self) -> Iterator[compute.TargetHttpsProxyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetHttpsProxy]: + def __iter__(self) -> Iterator[compute.TargetHttpsProxy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_https_proxies/transports/base.py b/google/cloud/compute_v1/services/target_https_proxies/transports/base.py index efe301a6a..b93890ff2 100644 --- a/google/cloud/compute_v1/services/target_https_proxies/transports/base.py +++ b/google/cloud/compute_v1/services/target_https_proxies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: 
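The __enter__/__exit__ support and the Iterator-typed pagers added above let the client be used as a context manager whose list results are iterated with transparent page fetching. A hedged usage sketch (placeholder project, default credentials assumed; exiting the block closes the transport, so it must not be shared with other clients):

from google.cloud import compute_v1

with compute_v1.TargetHttpsProxiesClient() as client:
    # The pager requests additional pages lazily as the loop advances.
    for proxy in client.list(project="example-project"):
        print(proxy.name)
# Leaving the block calls client.transport.close().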
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class TargetHttpsProxiesTransport(abc.ABC): """Abstract transport class for TargetHttpsProxies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -192,6 +156,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py b/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py index 145f84287..6180963b6 100644 --- a/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py +++ b/google/cloud/compute_v1/services/target_https_proxies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + TargetHttpsProxiesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import TargetHttpsProxiesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetHttpsProxiesRestTransport(TargetHttpsProxiesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListTargetHttpsProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpsProxyAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( TargetHttpsProxies.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,38 +150,56 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/targetHttpsProxies".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListTargetHttpsProxiesRequest.filter in request: - query_params["filter"] = request.filter - if ( - compute.AggregatedListTargetHttpsProxiesRequest.include_all_scopes - in request - ): - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListTargetHttpsProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListTargetHttpsProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListTargetHttpsProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListTargetHttpsProxiesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/targetHttpsProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListTargetHttpsProxiesRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListTargetHttpsProxiesRequest.to_json( + compute.AggregatedListTargetHttpsProxiesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -163,10 +211,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -177,6 +227,9 @@ def delete( TargetHttpsProxies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -200,24 +253,55 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}".format( - host=self._host, - project=request.project, - target_https_proxy=request.target_https_proxy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.DeleteTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetHttpsProxyRequest.to_json( + compute.DeleteTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -227,10 +311,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpsProxy: r"""Call the get method over HTTP. @@ -241,6 +327,9 @@ def get( TargetHttpsProxies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -261,22 +350,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}".format( - host=self._host, - project=request.project, - target_https_proxy=request.target_https_proxy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.GetTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetHttpsProxyRequest.to_json( + compute.GetTargetHttpsProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -288,10 +408,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -302,6 +424,9 @@ def insert( TargetHttpsProxies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -325,30 +450,61 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetHttpsProxies", + "body": "target_https_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpsProxy.to_json( - request.target_https_proxy_resource, + compute.TargetHttpsProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpsProxies".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetHttpsProxyRequest.to_json( + compute.InsertTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -359,10 +515,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetHttpsProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetHttpsProxyList: r"""Call the list method over HTTP. @@ -373,6 +531,9 @@ def list( TargetHttpsProxies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -383,30 +544,54 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpsProxies".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetHttpsProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetHttpsProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetHttpsProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetHttpsProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetHttpsProxiesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetHttpsProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListTargetHttpsProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetHttpsProxiesRequest.to_json( + compute.ListTargetHttpsProxiesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -418,10 +603,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -432,6 +619,9 @@ def patch( TargetHttpsProxies.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -455,32 +645,62 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}", + "body": "target_https_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.PatchTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpsProxy.to_json( - request.target_https_proxy_resource, + compute.TargetHttpsProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}".format( - host=self._host, - project=request.project, - target_https_proxy=request.target_https_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchTargetHttpsProxyRequest.to_json( + compute.PatchTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -491,10 +711,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_quic_override( + def _set_quic_override( self, request: compute.SetQuicOverrideTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set quic override method over HTTP. @@ -505,6 +727,9 @@ def set_quic_override( TargetHttpsProxies.SetQuicOverride. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -528,32 +753,64 @@ def set_quic_override( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setQuicOverride", + "body": "target_https_proxies_set_quic_override_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.SetQuicOverrideTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpsProxiesSetQuicOverrideRequest.to_json( - request.target_https_proxies_set_quic_override_request_resource, + compute.TargetHttpsProxiesSetQuicOverrideRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setQuicOverride".format( - host=self._host, - project=request.project, - target_https_proxy=request.target_https_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetQuicOverrideTargetHttpsProxyRequest.to_json( + compute.SetQuicOverrideTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetQuicOverrideTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. 
+ # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -564,10 +821,12 @@ def set_quic_override( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_ssl_certificates( + def _set_ssl_certificates( self, request: compute.SetSslCertificatesTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set ssl certificates method over HTTP. @@ -578,6 +837,9 @@ def set_ssl_certificates( TargetHttpsProxies.SetSslCertificates. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -601,32 +863,66 @@ def set_ssl_certificates( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setSslCertificates", + "body": "target_https_proxies_set_ssl_certificates_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.SetSslCertificatesTargetHttpsProxyRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetHttpsProxiesSetSslCertificatesRequest.to_json( - request.target_https_proxies_set_ssl_certificates_request_resource, + compute.TargetHttpsProxiesSetSslCertificatesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setSslCertificates".format( - host=self._host, - project=request.project, - target_https_proxy=request.target_https_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetSslCertificatesTargetHttpsProxyRequest.to_json( + compute.SetSslCertificatesTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if 
compute.SetSslCertificatesTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -637,10 +933,12 @@ def set_ssl_certificates( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_ssl_policy( + def _set_ssl_policy( self, request: compute.SetSslPolicyTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set ssl policy method over HTTP. @@ -651,6 +949,9 @@ def set_ssl_policy( TargetHttpsProxies.SetSslPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
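The backfill loop these handlers append after JSON-ifying the query params guards against a quirk of serializing with including_default_value_fields=False: a required field that still sits at its proto3 default (an empty string, for instance) is dropped from the JSON and would otherwise never reach the query string. A minimal, self-contained sketch of that behavior, using illustrative values that are not taken from the patch:

    # "project" was serialized away because it equals its default; the loop
    # restores it from the transcoded request's query params.
    query_params = {"requestId": "abc-123"}
    orig_query_params = {"project": "", "request_id": "abc-123"}
    required_fields = [
        ("project", "project"),
        ("target_https_proxy", "targetHttpsProxy"),
    ]

    for snake_case_name, camel_case_name in required_fields:
        if snake_case_name in orig_query_params:
            if camel_case_name not in query_params:
                query_params[camel_case_name] = orig_query_params[snake_case_name]

    assert query_params == {"requestId": "abc-123", "project": ""}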
@@ -674,32 +975,62 @@ def set_ssl_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setSslPolicy", + "body": "ssl_policy_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.SetSslPolicyTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SslPolicyReference.to_json( - request.ssl_policy_reference_resource, + compute.SslPolicyReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setSslPolicy".format( - host=self._host, - project=request.project, - target_https_proxy=request.target_https_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetSslPolicyTargetHttpsProxyRequest.to_json( + compute.SetSslPolicyTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetSslPolicyTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -710,10 +1041,12 @@ def set_ssl_policy( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_url_map( + def _set_url_map( self, request: compute.SetUrlMapTargetHttpsProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set url map method over HTTP. @@ -724,6 +1057,9 @@ def set_url_map( TargetHttpsProxies.SetUrlMap. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
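Whatever the verb, every rewritten handler still folds the per-call metadata tuples into the HTTP headers before the request goes out. A short sketch of that step (the header key is illustrative, not taken from the patch):

    metadata = (("x-goog-request-params", "project=my-project"),)

    headers = dict(metadata)
    headers["Content-Type"] = "application/json"
    assert headers == {
        "x-goog-request-params": "project=my-project",
        "Content-Type": "application/json",
    }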
@@ -747,32 +1083,62 @@ def set_url_map( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setUrlMap", + "body": "url_map_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_https_proxy", "targetHttpsProxy"), + ] + + request_kwargs = compute.SetUrlMapTargetHttpsProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMapReference.to_json( - request.url_map_reference_resource, + compute.UrlMapReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setUrlMap".format( - host=self._host, - project=request.project, - target_https_proxy=request.target_https_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetUrlMapTargetHttpsProxyRequest.to_json( + compute.SetUrlMapTargetHttpsProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetUrlMapTargetHttpsProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -783,5 +1149,75 @@ def set_url_map( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListTargetHttpsProxiesRequest], + compute.TargetHttpsProxyAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteTargetHttpsProxyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetTargetHttpsProxyRequest], compute.TargetHttpsProxy]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertTargetHttpsProxyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[ + [compute.ListTargetHttpsProxiesRequest], compute.TargetHttpsProxyList + ]: + return self._list + + @property + def patch( + self, + ) -> Callable[[compute.PatchTargetHttpsProxyRequest], compute.Operation]: + return self._patch + + @property + def set_quic_override( + self, + ) -> Callable[[compute.SetQuicOverrideTargetHttpsProxyRequest], compute.Operation]: + return self._set_quic_override + + @property + def set_ssl_certificates( + self, + ) -> Callable[ + [compute.SetSslCertificatesTargetHttpsProxyRequest], compute.Operation + ]: + return self._set_ssl_certificates + + @property + def set_ssl_policy( + self, + ) -> Callable[[compute.SetSslPolicyTargetHttpsProxyRequest], compute.Operation]: + return self._set_ssl_policy + + @property + def set_url_map( + self, + ) -> Callable[[compute.SetUrlMapTargetHttpsProxyRequest], compute.Operation]: + return self._set_url_map + + def close(self): + self._session.close() + __all__ = ("TargetHttpsProxiesRestTransport",) diff --git a/google/cloud/compute_v1/services/target_instances/client.py b/google/cloud/compute_v1/services/target_instances/client.py index bde61e196..a11416c38 100644 --- a/google/cloud/compute_v1/services/target_instances/client.py +++ b/google/cloud/compute_v1/services/target_instances/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_instances import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetInstancesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,21 +337,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListTargetInstancesRequest = None, + request: Union[compute.AggregatedListTargetInstancesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of target instances. Args: - request (google.cloud.compute_v1.types.AggregatedListTargetInstancesRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListTargetInstancesRequest, dict]): The request object. A request message for TargetInstances.AggregatedList. See the method description for details. @@ -401,19 +413,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteTargetInstanceRequest = None, + request: Union[compute.DeleteTargetInstanceRequest, dict] = None, *, project: str = None, zone: str = None, target_instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified TargetInstance resource. 
Args: - request (google.cloud.compute_v1.types.DeleteTargetInstanceRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetInstanceRequest, dict]): The request object. A request message for TargetInstances.Delete. See the method description for details. @@ -498,12 +510,12 @@ def delete( def get( self, - request: compute.GetTargetInstanceRequest = None, + request: Union[compute.GetTargetInstanceRequest, dict] = None, *, project: str = None, zone: str = None, target_instance: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetInstance: @@ -512,7 +524,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetTargetInstanceRequest): + request (Union[google.cloud.compute_v1.types.GetTargetInstanceRequest, dict]): The request object. A request message for TargetInstances.Get. See the method description for details. @@ -590,12 +602,12 @@ def get( def insert( self, - request: compute.InsertTargetInstanceRequest = None, + request: Union[compute.InsertTargetInstanceRequest, dict] = None, *, project: str = None, zone: str = None, target_instance_resource: compute.TargetInstance = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -603,7 +615,7 @@ def insert( project and zone using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetInstanceRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetInstanceRequest, dict]): The request object. A request message for TargetInstances.Insert. See the method description for details. @@ -686,11 +698,11 @@ def insert( def list( self, - request: compute.ListTargetInstancesRequest = None, + request: Union[compute.ListTargetInstancesRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -698,7 +710,7 @@ def list( available to the specified project and zone. Args: - request (google.cloud.compute_v1.types.ListTargetInstancesRequest): + request (Union[google.cloud.compute_v1.types.ListTargetInstancesRequest, dict]): The request object. A request message for TargetInstances.List. See the method description for details. @@ -768,6 +780,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_instances/pagers.py b/google/cloud/compute_v1/services/target_instances/pagers.py index 7902e9c82..c02e1f454 100644 --- a/google/cloud/compute_v1/services/target_instances/pagers.py +++ b/google/cloud/compute_v1/services/target_instances/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetInstanceAggregatedList]: + def pages(self) -> Iterator[compute.TargetInstanceAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.TargetInstancesScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.TargetInstancesScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetInstanceList]: + def pages(self) -> Iterator[compute.TargetInstanceList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetInstance]: + def __iter__(self) -> Iterator[compute.TargetInstance]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_instances/transports/base.py b/google/cloud/compute_v1/services/target_instances/transports/base.py index 3f6318c9e..3e7b781c1 100644 --- a/google/cloud/compute_v1/services/target_instances/transports/base.py +++ b/google/cloud/compute_v1/services/target_instances/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except 
pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class TargetInstancesTransport(abc.ABC): """Abstract transport class for TargetInstances.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/target_instances/transports/rest.py b/google/cloud/compute_v1/services/target_instances/transports/rest.py index 6c261ec8a..c3f4ad25e 100644 --- a/google/cloud/compute_v1/services/target_instances/transports/rest.py +++ b/google/cloud/compute_v1/services/target_instances/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + TargetInstancesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import TargetInstancesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetInstancesRestTransport(TargetInstancesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListTargetInstancesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetInstanceAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( TargetInstances.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
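The aggregated-list handler in the next hunk no longer formats the URL by hand; it feeds the request dict and the http_options entry to path_template.transcode, which returns the HTTP verb, the expanded URI, and whatever fields are left over as query params. A rough sketch with illustrative values:

    from google.api_core import path_template

    http_options = [
        {
            "method": "get",
            "uri": "/compute/v1/projects/{project}/aggregated/targetInstances",
        },
    ]
    transcoded = path_template.transcode(
        http_options, project="my-project", page_token="abc"
    )
    # transcoded["method"]       -> "get"
    # transcoded["uri"]          -> "/compute/v1/projects/my-project/aggregated/targetInstances"
    # transcoded["query_params"] -> {"page_token": "abc"}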
@@ -120,35 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/targetInstances".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/targetInstances", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListTargetInstancesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListTargetInstancesRequest.to_json( + compute.AggregatedListTargetInstancesRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListTargetInstancesRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListTargetInstancesRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListTargetInstancesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListTargetInstancesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListTargetInstancesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListTargetInstancesRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteTargetInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( TargetInstances.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,25 +251,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}".format( - host=self._host, - project=request.project, - zone=request.zone, - target_instance=request.target_instance, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_instance", "targetInstance"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteTargetInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetInstanceRequest.to_json( + compute.DeleteTargetInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +308,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetInstance: r"""Call the get method over HTTP. @@ -239,6 +324,9 @@ def get( TargetInstances.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
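With the verb carried in the transcoded request, the handlers dispatch on the session dynamically instead of hard-coding self._session.get or self._session.post; getattr resolves to the same bound method the old code called directly:

    import requests

    session = requests.Session()
    method = "delete"  # what transcode produces for the delete handler above
    assert getattr(session, method) == session.delete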
@@ -255,23 +343,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}".format( - host=self._host, - project=request.project, - zone=request.zone, - target_instance=request.target_instance, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_instance", "targetInstance"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetTargetInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetInstanceRequest.to_json( + compute.GetTargetInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -283,10 +402,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetInstanceRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -297,6 +418,9 @@ def insert( TargetInstances.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
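The insert handler in the next hunk serializes the resource from the transcoded body rather than reading it straight off the request object. A sketch of that serialization step, with an illustrative resource name:

    from google.cloud.compute_v1.types import compute

    body = compute.TargetInstance.to_json(
        compute.TargetInstance({"name": "my-target-instance"}),
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
    # body is a JSON string, e.g. '{"name": "my-target-instance"}'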
@@ -320,30 +444,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/targetInstances", + "body": "target_instance_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.InsertTargetInstanceRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetInstance.to_json( - request.target_instance_resource, + compute.TargetInstance(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/targetInstances".format( - host=self._host, project=request.project, zone=request.zone, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetInstanceRequest.to_json( + compute.InsertTargetInstanceRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetInstanceRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -354,10 +508,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetInstancesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetInstanceList: r"""Call the list method over HTTP. @@ -368,6 +524,9 @@ def list( TargetInstances.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
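Query strings are likewise no longer assembled field by field; the JSON-ified request is flattened into key/value pairs by rest_helpers.flatten_query_params and handed to requests as params. Roughly, for flat string fields:

    from google.api_core import rest_helpers

    params = rest_helpers.flatten_query_params(
        {"pageToken": "abc", "filter": "name = my-instance"}
    )
    # roughly [("pageToken", "abc"), ("filter", "name = my-instance")]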
@@ -378,30 +537,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/targetInstances".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/targetInstances", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListTargetInstancesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetInstancesRequest.to_json( + compute.ListTargetInstancesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetInstancesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetInstancesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetInstancesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetInstancesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetInstancesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
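The error path itself is untouched by these hunks: a non-2xx response is still raised as the matching core_exceptions.GoogleAPICallError subclass. A small sketch of the status-code mapping google.api_core provides for this (the message text is illustrative):

    from google.api_core import exceptions as core_exceptions

    exc = core_exceptions.from_http_status(404, "target instance not found")
    assert isinstance(exc, core_exceptions.NotFound)
    assert isinstance(exc, core_exceptions.GoogleAPICallError)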
@@ -413,5 +595,41 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListTargetInstancesRequest], + compute.TargetInstanceAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteTargetInstanceRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetTargetInstanceRequest], compute.TargetInstance]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertTargetInstanceRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListTargetInstancesRequest], compute.TargetInstanceList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("TargetInstancesRestTransport",) diff --git a/google/cloud/compute_v1/services/target_pools/client.py b/google/cloud/compute_v1/services/target_pools/client.py index 9669e8b2d..f613de58f 100644 --- a/google/cloud/compute_v1/services/target_pools/client.py +++ b/google/cloud/compute_v1/services/target_pools/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_pools import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetPoolsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,24 +335,25 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def add_health_check( self, - request: compute.AddHealthCheckTargetPoolRequest = None, + request: Union[compute.AddHealthCheckTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, target_pools_add_health_check_request_resource: compute.TargetPoolsAddHealthCheckRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Adds health check URLs to a target pool. Args: - request (google.cloud.compute_v1.types.AddHealthCheckTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.AddHealthCheckTargetPoolRequest, dict]): The request object. A request message for TargetPools.AddHealthCheck. See the method description for details. @@ -442,20 +454,20 @@ def add_health_check( def add_instance( self, - request: compute.AddInstanceTargetPoolRequest = None, + request: Union[compute.AddInstanceTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, target_pools_add_instance_request_resource: compute.TargetPoolsAddInstanceRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Adds an instance to a target pool. Args: - request (google.cloud.compute_v1.types.AddInstanceTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.AddInstanceTargetPoolRequest, dict]): The request object. A request message for TargetPools.AddInstance. See the method description for details. @@ -551,17 +563,17 @@ def add_instance( def aggregated_list( self, - request: compute.AggregatedListTargetPoolsRequest = None, + request: Union[compute.AggregatedListTargetPoolsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of target pools. Args: - request (google.cloud.compute_v1.types.AggregatedListTargetPoolsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListTargetPoolsRequest, dict]): The request object. A request message for TargetPools.AggregatedList. See the method description for details. @@ -622,19 +634,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteTargetPoolRequest = None, + request: Union[compute.DeleteTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified target pool. 
Args: - request (google.cloud.compute_v1.types.DeleteTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetPoolRequest, dict]): The request object. A request message for TargetPools.Delete. See the method description for details. @@ -719,12 +731,12 @@ def delete( def get( self, - request: compute.GetTargetPoolRequest = None, + request: Union[compute.GetTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetPool: @@ -732,7 +744,7 @@ def get( available target pools by making a list() request. Args: - request (google.cloud.compute_v1.types.GetTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.GetTargetPoolRequest, dict]): The request object. A request message for TargetPools.Get. See the method description for details. project (str): @@ -809,13 +821,13 @@ def get( def get_health( self, - request: compute.GetHealthTargetPoolRequest = None, + request: Union[compute.GetHealthTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, instance_reference_resource: compute.InstanceReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetPoolInstanceHealth: @@ -824,7 +836,7 @@ def get_health( pool. Args: - request (google.cloud.compute_v1.types.GetHealthTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.GetHealthTargetPoolRequest, dict]): The request object. A request message for TargetPools.GetHealth. See the method description for details. @@ -903,12 +915,12 @@ def get_health( def insert( self, - request: compute.InsertTargetPoolRequest = None, + request: Union[compute.InsertTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool_resource: compute.TargetPool = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -916,7 +928,7 @@ def insert( region using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetPoolRequest, dict]): The request object. A request message for TargetPools.Insert. See the method description for details. @@ -999,11 +1011,11 @@ def insert( def list( self, - request: compute.ListTargetPoolsRequest = None, + request: Union[compute.ListTargetPoolsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -1011,7 +1023,7 @@ def list( specified project and region. Args: - request (google.cloud.compute_v1.types.ListTargetPoolsRequest): + request (Union[google.cloud.compute_v1.types.ListTargetPoolsRequest, dict]): The request object. A request message for TargetPools.List. See the method description for details. 
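With the request parameters widened to Union[<Request>, dict], callers can pass a plain dict anywhere a request object used to be required; the client converts it to the request type before dispatching. A usage sketch with illustrative project and region values (application default credentials are assumed):

    from google.cloud import compute_v1

    client = compute_v1.TargetPoolsClient()
    pager = client.list(request={"project": "my-project", "region": "us-central1"})
    for target_pool in pager:
        print(target_pool.name)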
@@ -1083,20 +1095,20 @@ def list( def remove_health_check( self, - request: compute.RemoveHealthCheckTargetPoolRequest = None, + request: Union[compute.RemoveHealthCheckTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, target_pools_remove_health_check_request_resource: compute.TargetPoolsRemoveHealthCheckRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Removes health check URL from a target pool. Args: - request (google.cloud.compute_v1.types.RemoveHealthCheckTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.RemoveHealthCheckTargetPoolRequest, dict]): The request object. A request message for TargetPools.RemoveHealthCheck. See the method description for details. @@ -1195,20 +1207,20 @@ def remove_health_check( def remove_instance( self, - request: compute.RemoveInstanceTargetPoolRequest = None, + request: Union[compute.RemoveInstanceTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, target_pools_remove_instance_request_resource: compute.TargetPoolsRemoveInstanceRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Removes instance URL from a target pool. Args: - request (google.cloud.compute_v1.types.RemoveInstanceTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.RemoveInstanceTargetPoolRequest, dict]): The request object. A request message for TargetPools.RemoveInstance. See the method description for details. @@ -1309,20 +1321,20 @@ def remove_instance( def set_backup( self, - request: compute.SetBackupTargetPoolRequest = None, + request: Union[compute.SetBackupTargetPoolRequest, dict] = None, *, project: str = None, region: str = None, target_pool: str = None, target_reference_resource: compute.TargetReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes a backup target pool's configurations. Args: - request (google.cloud.compute_v1.types.SetBackupTargetPoolRequest): + request (Union[google.cloud.compute_v1.types.SetBackupTargetPoolRequest, dict]): The request object. A request message for TargetPools.SetBackup. See the method description for details. @@ -1414,6 +1426,19 @@ def set_backup( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_pools/pagers.py b/google/cloud/compute_v1/services/target_pools/pagers.py index 5dfaa09fc..17c90ee04 100644 --- a/google/cloud/compute_v1/services/target_pools/pagers.py +++ b/google/cloud/compute_v1/services/target_pools/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetPoolAggregatedList]: + def pages(self) -> Iterator[compute.TargetPoolAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.TargetPoolsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.TargetPoolsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetPoolList]: + def pages(self) -> Iterator[compute.TargetPoolList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetPool]: + def __iter__(self) -> Iterator[compute.TargetPool]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_pools/transports/base.py b/google/cloud/compute_v1/services/target_pools/transports/base.py index 04f55ad82..2854adbe2 100644 --- a/google/cloud/compute_v1/services/target_pools/transports/base.py +++ b/google/cloud/compute_v1/services/target_pools/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION 
= None - class TargetPoolsTransport(abc.ABC): """Abstract transport class for TargetPools.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -193,6 +157,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def add_health_check( self, diff --git a/google/cloud/compute_v1/services/target_pools/transports/rest.py b/google/cloud/compute_v1/services/target_pools/transports/rest.py index c5cc3eb6c..79be0825d 100644 --- a/google/cloud/compute_v1/services/target_pools/transports/rest.py +++ b/google/cloud/compute_v1/services/target_pools/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
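Editor's note on the pagers.py hunks above: the switch from `Iterable` to `Iterator` only tightens the annotations to match what the generator-based `pages` property and `__iter__` already return at runtime. A minimal usage sketch — the project, region, and the assumption of Application Default Credentials are illustrative, not taken from this diff:

```python
from google.cloud import compute_v1

# Assumes Application Default Credentials are configured.
client = compute_v1.TargetPoolsClient()

# Item-by-item iteration goes through ListPager.__iter__ (Iterator[TargetPool]).
for pool in client.list(project="my-project", region="us-central1"):
    print(pool.name)

# Page-by-page iteration uses the .pages generator property
# (Iterator[TargetPoolList]); each new page triggers one more list request.
pager = client.list(project="my-project", region="us-central1")
for page in pager.pages:
    print(len(page.items), "target pools in this page")
```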
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import TargetPoolsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import TargetPoolsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetPoolsRestTransport(TargetPoolsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def add_health_check( + def _add_health_check( self, request: compute.AddHealthCheckTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add health check method over HTTP. @@ -112,6 +136,9 @@ def add_health_check( TargetPools.AddHealthCheck. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,33 +162,63 @@ def add_health_check( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addHealthCheck", + "body": "target_pools_add_health_check_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.AddHealthCheckTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetPoolsAddHealthCheckRequest.to_json( - request.target_pools_add_health_check_request_resource, + compute.TargetPoolsAddHealthCheckRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addHealthCheck".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddHealthCheckTargetPoolRequest.to_json( + compute.AddHealthCheckTargetPoolRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddHealthCheckTargetPoolRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -172,10 +229,12 @@ def add_health_check( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def add_instance( + def _add_instance( self, request: compute.AddInstanceTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the add instance method over HTTP. @@ -186,6 +245,9 @@ def add_instance( TargetPools.AddInstance. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
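For readers unfamiliar with `google.api_core.path_template.transcode`, which replaces the hand-built URLs and per-field query-parameter checks throughout this file: it matches the request fields against the `http_options` rules and splits them into URI, HTTP method, body, and leftover query parameters. A rough sketch, reusing the `http_options` from `_add_health_check` above but with made-up field values:

```python
from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addHealthCheck",
        "body": "target_pools_add_health_check_request_resource",
    },
]

# Illustrative values only.
request_kwargs = {
    "project": "my-project",
    "region": "us-central1",
    "target_pool": "my-pool",
    "request_id": "abc-123",
    "target_pools_add_health_check_request_resource": {"health_checks": []},
}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       -> "post"
# transcoded["uri"]          -> the URI template with {project}, {region} and
#                               {target_pool} expanded from the request fields
# transcoded["body"]         -> the dict bound to the configured body field
# transcoded["query_params"] -> the remaining fields (here just request_id),
#                               which the transport then serializes as query params
```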
@@ -209,33 +271,63 @@ def add_instance( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addInstance", + "body": "target_pools_add_instance_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.AddInstanceTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetPoolsAddInstanceRequest.to_json( - request.target_pools_add_instance_request_resource, + compute.TargetPoolsAddInstanceRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addInstance".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AddInstanceTargetPoolRequest.to_json( + compute.AddInstanceTargetPoolRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AddInstanceTargetPoolRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -246,10 +338,12 @@ def add_instance( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListTargetPoolsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetPoolAggregatedList: r"""Call the aggregated list method over HTTP. @@ -260,6 +354,9 @@ def aggregated_list( TargetPools.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -268,32 +365,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/targetPools".format( - host=self._host, project=request.project, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListTargetPoolsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListTargetPoolsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListTargetPoolsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListTargetPoolsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListTargetPoolsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListTargetPoolsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/targetPools", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListTargetPoolsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListTargetPoolsRequest.to_json( + compute.AggregatedListTargetPoolsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -305,10 +424,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -319,6 +440,9 @@ def delete( TargetPools.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -342,25 +466,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.DeleteTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetPoolRequest.to_json( + compute.DeleteTargetPoolRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetPoolRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -370,10 +523,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetPool: r"""Call the get method over HTTP. @@ -384,6 +539,9 @@ def get( TargetPools.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -400,23 +558,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.GetTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetPoolRequest.to_json( + compute.GetTargetPoolRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -428,10 +617,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_health( + def _get_health( self, request: compute.GetHealthTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetPoolInstanceHealth: r"""Call the get health method over HTTP. @@ -442,6 +633,9 @@ def get_health( TargetPools.GetHealth. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -450,31 +644,61 @@ def get_health( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/getHealth", + "body": "instance_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.GetHealthTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.InstanceReference.to_json( - request.instance_reference_resource, + compute.InstanceReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/getHealth".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetHealthTargetPoolRequest.to_json( + compute.GetHealthTargetPoolRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -487,10 +711,12 @@ def get_health( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -501,6 +727,9 @@ def insert( TargetPools.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -524,30 +753,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools", + "body": "target_pool_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetPool.to_json( - request.target_pool_resource, + compute.TargetPool(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetPoolRequest.to_json( + compute.InsertTargetPoolRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetPoolRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -558,10 +817,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetPoolsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetPoolList: r"""Call the list method over HTTP. @@ -572,6 +833,9 @@ def list( TargetPools.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -582,30 +846,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools".format( - host=self._host, project=request.project, region=request.region, - ) - - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetPoolsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetPoolsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetPoolsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetPoolsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetPoolsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListTargetPoolsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetPoolsRequest.to_json( + compute.ListTargetPoolsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) + ) + + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -617,10 +904,12 @@ def list( response.content, ignore_unknown_fields=True ) - def remove_health_check( + def _remove_health_check( self, request: compute.RemoveHealthCheckTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove health check method over HTTP. @@ -631,6 +920,9 @@ def remove_health_check( TargetPools.RemoveHealthCheck. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -654,33 +946,63 @@ def remove_health_check( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeHealthCheck", + "body": "target_pools_remove_health_check_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.RemoveHealthCheckTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetPoolsRemoveHealthCheckRequest.to_json( - request.target_pools_remove_health_check_request_resource, + compute.TargetPoolsRemoveHealthCheckRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeHealthCheck".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveHealthCheckTargetPoolRequest.to_json( + compute.RemoveHealthCheckTargetPoolRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveHealthCheckTargetPoolRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -691,10 +1013,12 @@ def remove_health_check( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def remove_instance( + def _remove_instance( self, request: compute.RemoveInstanceTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the remove instance method over HTTP. @@ -705,6 +1029,9 @@ def remove_instance( TargetPools.RemoveInstance. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -728,33 +1055,63 @@ def remove_instance( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeInstance", + "body": "target_pools_remove_instance_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.RemoveInstanceTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetPoolsRemoveInstanceRequest.to_json( - request.target_pools_remove_instance_request_resource, + compute.TargetPoolsRemoveInstanceRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeInstance".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.RemoveInstanceTargetPoolRequest.to_json( + compute.RemoveInstanceTargetPoolRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.RemoveInstanceTargetPoolRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -765,10 +1122,12 @@ def remove_instance( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_backup( + def _set_backup( self, request: compute.SetBackupTargetPoolRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set backup method over HTTP. @@ -779,6 +1138,9 @@ def set_backup( TargetPools.SetBackup. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -802,35 +1164,61 @@ def set_backup( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setBackup", + "body": "target_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_pool", "targetPool"), + ] + + request_kwargs = compute.SetBackupTargetPoolRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetReference.to_json( - request.target_reference_resource, + compute.TargetReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setBackup".format( - host=self._host, - project=request.project, - region=request.region, - target_pool=request.target_pool, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetBackupTargetPoolRequest.to_json( + compute.SetBackupTargetPoolRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetBackupTargetPoolRequest.failover_ratio in request: - query_params["failoverRatio"] = request.failover_ratio - if compute.SetBackupTargetPoolRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -841,5 +1229,72 @@ def set_backup( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def add_health_check( + self, + ) -> Callable[[compute.AddHealthCheckTargetPoolRequest], compute.Operation]: + return self._add_health_check + + @property + def add_instance( + self, + ) -> Callable[[compute.AddInstanceTargetPoolRequest], compute.Operation]: + return self._add_instance + + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListTargetPoolsRequest], compute.TargetPoolAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteTargetPoolRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetTargetPoolRequest], compute.TargetPool]: + return self._get + + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthTargetPoolRequest], compute.TargetPoolInstanceHealth + ]: + return self._get_health + + @property + def insert(self) -> Callable[[compute.InsertTargetPoolRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListTargetPoolsRequest], compute.TargetPoolList]: + return self._list + + @property + def remove_health_check( + self, + ) -> Callable[[compute.RemoveHealthCheckTargetPoolRequest], compute.Operation]: + return self._remove_health_check + + @property + def remove_instance( + self, + ) -> Callable[[compute.RemoveInstanceTargetPoolRequest], compute.Operation]: + return self._remove_instance + + @property + def set_backup( + self, + ) -> Callable[[compute.SetBackupTargetPoolRequest], compute.Operation]: + return self._set_backup + + def close(self): + self._session.close() + __all__ = ("TargetPoolsRestTransport",) diff --git a/google/cloud/compute_v1/services/target_ssl_proxies/client.py b/google/cloud/compute_v1/services/target_ssl_proxies/client.py index bcb4c4176..789fd6628 100644 --- a/google/cloud/compute_v1/services/target_ssl_proxies/client.py +++ b/google/cloud/compute_v1/services/target_ssl_proxies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
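A note on the pattern introduced at the end of rest.py above: each public method is renamed with a leading underscore and re-exposed as a read-only property, so `transport.delete(request)` and friends keep working while the client-side wrappers can treat the callables uniformly. A stripped-down sketch of the idea — the class and field names here are invented for illustration, not part of the generated code:

```python
from typing import Callable

class FakeRestTransport:
    def _delete(self, request: dict) -> str:
        # Stand-in for the real HTTP call.
        return "deleted " + request["target_pool"]

    @property
    def delete(self) -> Callable[[dict], str]:
        # Callers still write transport.delete(request); the property simply
        # hands back the underscore-prefixed implementation.
        return self._delete

transport = FakeRestTransport()
print(transport.delete({"target_pool": "my-pool"}))  # -> deleted my-pool
```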
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_ssl_proxies import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetSslProxiesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,22 +337,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteTargetSslProxyRequest = None, + request: Union[compute.DeleteTargetSslProxyRequest, dict] = None, *, project: str = None, target_ssl_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified TargetSslProxy resource. Args: - request (google.cloud.compute_v1.types.DeleteTargetSslProxyRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetSslProxyRequest, dict]): The request object. A request message for TargetSslProxies.Delete. See the method description for details. @@ -417,11 +429,11 @@ def delete( def get( self, - request: compute.GetTargetSslProxyRequest = None, + request: Union[compute.GetTargetSslProxyRequest, dict] = None, *, project: str = None, target_ssl_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetSslProxy: @@ -430,7 +442,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetTargetSslProxyRequest): + request (Union[google.cloud.compute_v1.types.GetTargetSslProxyRequest, dict]): The request object. 
A request message for TargetSslProxies.Get. See the method description for details. @@ -499,11 +511,11 @@ def get( def insert( self, - request: compute.InsertTargetSslProxyRequest = None, + request: Union[compute.InsertTargetSslProxyRequest, dict] = None, *, project: str = None, target_ssl_proxy_resource: compute.TargetSslProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -511,7 +523,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetSslProxyRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetSslProxyRequest, dict]): The request object. A request message for TargetSslProxies.Insert. See the method description for details. @@ -585,10 +597,10 @@ def insert( def list( self, - request: compute.ListTargetSslProxiesRequest = None, + request: Union[compute.ListTargetSslProxiesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -596,7 +608,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListTargetSslProxiesRequest): + request (Union[google.cloud.compute_v1.types.ListTargetSslProxiesRequest, dict]): The request object. A request message for TargetSslProxies.List. See the method description for details. @@ -659,19 +671,19 @@ def list( def set_backend_service( self, - request: compute.SetBackendServiceTargetSslProxyRequest = None, + request: Union[compute.SetBackendServiceTargetSslProxyRequest, dict] = None, *, project: str = None, target_ssl_proxy: str = None, target_ssl_proxies_set_backend_service_request_resource: compute.TargetSslProxiesSetBackendServiceRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the BackendService for TargetSslProxy. Args: - request (google.cloud.compute_v1.types.SetBackendServiceTargetSslProxyRequest): + request (Union[google.cloud.compute_v1.types.SetBackendServiceTargetSslProxyRequest, dict]): The request object. A request message for TargetSslProxies.SetBackendService. See the method description for details. @@ -763,19 +775,19 @@ def set_backend_service( def set_proxy_header( self, - request: compute.SetProxyHeaderTargetSslProxyRequest = None, + request: Union[compute.SetProxyHeaderTargetSslProxyRequest, dict] = None, *, project: str = None, target_ssl_proxy: str = None, target_ssl_proxies_set_proxy_header_request_resource: compute.TargetSslProxiesSetProxyHeaderRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the ProxyHeaderType for TargetSslProxy. Args: - request (google.cloud.compute_v1.types.SetProxyHeaderTargetSslProxyRequest): + request (Union[google.cloud.compute_v1.types.SetProxyHeaderTargetSslProxyRequest, dict]): The request object. A request message for TargetSslProxies.SetProxyHeader. See the method description for details. 
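The client.py hunks above replace `distutils.util.strtobool` with an explicit check of `GOOGLE_API_USE_CLIENT_CERTIFICATE`. In practice this narrows the accepted values: a hypothetical opt-in now looks like the sketch below, and values such as "1" or "yes" (previously tolerated by strtobool) raise ValueError at client construction.

```python
import os

# Only the literal strings "true" and "false" are accepted after this change.
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"

from google.cloud import compute_v1

# Assumes a client certificate source is available (for example via
# client_options.client_cert_source or a default cert source); otherwise
# the usual mTLS discovery rules apply.
client = compute_v1.TargetSslProxiesClient()
```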
@@ -866,19 +878,19 @@ def set_proxy_header( def set_ssl_certificates( self, - request: compute.SetSslCertificatesTargetSslProxyRequest = None, + request: Union[compute.SetSslCertificatesTargetSslProxyRequest, dict] = None, *, project: str = None, target_ssl_proxy: str = None, target_ssl_proxies_set_ssl_certificates_request_resource: compute.TargetSslProxiesSetSslCertificatesRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes SslCertificates for TargetSslProxy. Args: - request (google.cloud.compute_v1.types.SetSslCertificatesTargetSslProxyRequest): + request (Union[google.cloud.compute_v1.types.SetSslCertificatesTargetSslProxyRequest, dict]): The request object. A request message for TargetSslProxies.SetSslCertificates. See the method description for details. @@ -970,12 +982,12 @@ def set_ssl_certificates( def set_ssl_policy( self, - request: compute.SetSslPolicyTargetSslProxyRequest = None, + request: Union[compute.SetSslPolicyTargetSslProxyRequest, dict] = None, *, project: str = None, target_ssl_proxy: str = None, ssl_policy_reference_resource: compute.SslPolicyReference = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -986,7 +998,7 @@ def set_ssl_policy( connection between the load balancer and the backends. Args: - request (google.cloud.compute_v1.types.SetSslPolicyTargetSslProxyRequest): + request (Union[google.cloud.compute_v1.types.SetSslPolicyTargetSslProxyRequest, dict]): The request object. A request message for TargetSslProxies.SetSslPolicy. See the method description for details. @@ -1071,6 +1083,19 @@ def set_ssl_policy( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_ssl_proxies/pagers.py b/google/cloud/compute_v1/services/target_ssl_proxies/pagers.py index 09b1f2249..55302b87b 100644 --- a/google/cloud/compute_v1/services/target_ssl_proxies/pagers.py +++ b/google/cloud/compute_v1/services/target_ssl_proxies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetSslProxyList]: + def pages(self) -> Iterator[compute.TargetSslProxyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetSslProxy]: + def __iter__(self) -> Iterator[compute.TargetSslProxy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_ssl_proxies/transports/base.py b/google/cloud/compute_v1/services/target_ssl_proxies/transports/base.py index 7ab3639a9..60762f056 100644 --- a/google/cloud/compute_v1/services/target_ssl_proxies/transports/base.py +++ b/google/cloud/compute_v1/services/target_ssl_proxies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class TargetSslProxiesTransport(abc.ABC): """Abstract transport class for TargetSslProxies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. 
+ # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -186,6 +150,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py b/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py index 38dc06094..d86304b45 100644 --- a/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py +++ b/google/cloud/compute_v1/services/target_ssl_proxies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + TargetSslProxiesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import TargetSslProxiesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetSslProxiesRestTransport(TargetSslProxiesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteTargetSslProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( TargetSslProxies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,24 +165,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}".format( - host=self._host, - project=request.project, - target_ssl_proxy=request.target_ssl_proxy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_ssl_proxy", "targetSslProxy"), + ] + + request_kwargs = compute.DeleteTargetSslProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetSslProxyRequest.to_json( + compute.DeleteTargetSslProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetSslProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -162,10 +221,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetSslProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetSslProxy: r"""Call the get method over HTTP. @@ -176,6 +237,9 @@ def get( TargetSslProxies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -192,22 +256,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}".format( - host=self._host, - project=request.project, - target_ssl_proxy=request.target_ssl_proxy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_ssl_proxy", "targetSslProxy"), + ] + + request_kwargs = compute.GetTargetSslProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetSslProxyRequest.to_json( + compute.GetTargetSslProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -219,10 +314,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetSslProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -233,6 +330,9 @@ def insert( TargetSslProxies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -256,30 +356,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies", + "body": "target_ssl_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertTargetSslProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetSslProxy.to_json( - request.target_ssl_proxy_resource, + compute.TargetSslProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetSslProxyRequest.to_json( + compute.InsertTargetSslProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetSslProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -290,10 +419,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetSslProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetSslProxyList: r"""Call the list method over HTTP. @@ -304,6 +435,9 @@ def list( TargetSslProxies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -314,30 +448,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListTargetSslProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetSslProxiesRequest.to_json( + compute.ListTargetSslProxiesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetSslProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetSslProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetSslProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetSslProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetSslProxiesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -349,10 +505,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_backend_service( + def _set_backend_service( self, request: compute.SetBackendServiceTargetSslProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set backend service method over HTTP. @@ -363,6 +521,9 @@ def set_backend_service( TargetSslProxies.SetBackendService. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -386,32 +547,64 @@ def set_backend_service( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setBackendService", + "body": "target_ssl_proxies_set_backend_service_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_ssl_proxy", "targetSslProxy"), + ] + + request_kwargs = compute.SetBackendServiceTargetSslProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetSslProxiesSetBackendServiceRequest.to_json( - request.target_ssl_proxies_set_backend_service_request_resource, + compute.TargetSslProxiesSetBackendServiceRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setBackendService".format( - host=self._host, - project=request.project, - target_ssl_proxy=request.target_ssl_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetBackendServiceTargetSslProxyRequest.to_json( + compute.SetBackendServiceTargetSslProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetBackendServiceTargetSslProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -422,10 +615,12 @@ def set_backend_service( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_proxy_header( + def _set_proxy_header( self, request: compute.SetProxyHeaderTargetSslProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set proxy header method over HTTP. @@ -436,6 +631,9 @@ def set_proxy_header( TargetSslProxies.SetProxyHeader. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -459,32 +657,62 @@ def set_proxy_header( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setProxyHeader", + "body": "target_ssl_proxies_set_proxy_header_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_ssl_proxy", "targetSslProxy"), + ] + + request_kwargs = compute.SetProxyHeaderTargetSslProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetSslProxiesSetProxyHeaderRequest.to_json( - request.target_ssl_proxies_set_proxy_header_request_resource, + compute.TargetSslProxiesSetProxyHeaderRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setProxyHeader".format( - host=self._host, - project=request.project, - target_ssl_proxy=request.target_ssl_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetProxyHeaderTargetSslProxyRequest.to_json( + compute.SetProxyHeaderTargetSslProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetProxyHeaderTargetSslProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -495,10 +723,12 @@ def set_proxy_header( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_ssl_certificates( + def _set_ssl_certificates( self, request: compute.SetSslCertificatesTargetSslProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set ssl certificates method over HTTP. @@ -509,6 +739,9 @@ def set_ssl_certificates( TargetSslProxies.SetSslCertificates. See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -532,32 +765,66 @@ def set_ssl_certificates( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslCertificates", + "body": "target_ssl_proxies_set_ssl_certificates_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_ssl_proxy", "targetSslProxy"), + ] + + request_kwargs = compute.SetSslCertificatesTargetSslProxyRequest.to_dict( + request + ) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetSslProxiesSetSslCertificatesRequest.to_json( - request.target_ssl_proxies_set_ssl_certificates_request_resource, + compute.TargetSslProxiesSetSslCertificatesRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslCertificates".format( - host=self._host, - project=request.project, - target_ssl_proxy=request.target_ssl_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetSslCertificatesTargetSslProxyRequest.to_json( + compute.SetSslCertificatesTargetSslProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetSslCertificatesTargetSslProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -568,10 +835,12 @@ def set_ssl_certificates( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_ssl_policy( + def _set_ssl_policy( self, request: compute.SetSslPolicyTargetSslProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set ssl policy method over HTTP. 
@@ -582,6 +851,9 @@ def set_ssl_policy( TargetSslProxies.SetSslPolicy. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -605,32 +877,62 @@ def set_ssl_policy( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslPolicy", + "body": "ssl_policy_reference_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_ssl_proxy", "targetSslProxy"), + ] + + request_kwargs = compute.SetSslPolicyTargetSslProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.SslPolicyReference.to_json( - request.ssl_policy_reference_resource, + compute.SslPolicyReference(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslPolicy".format( - host=self._host, - project=request.project, - target_ssl_proxy=request.target_ssl_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetSslPolicyTargetSslProxyRequest.to_json( + compute.SetSslPolicyTargetSslProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetSslPolicyTargetSslProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -641,5 +943,56 @@ def set_ssl_policy( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteTargetSslProxyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetTargetSslProxyRequest], compute.TargetSslProxy]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertTargetSslProxyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListTargetSslProxiesRequest], compute.TargetSslProxyList]: + return self._list + + @property + def set_backend_service( + self, + ) -> Callable[[compute.SetBackendServiceTargetSslProxyRequest], compute.Operation]: + return self._set_backend_service + + @property + def set_proxy_header( + self, + ) -> Callable[[compute.SetProxyHeaderTargetSslProxyRequest], compute.Operation]: + return self._set_proxy_header + + @property + def set_ssl_certificates( + self, + ) -> Callable[[compute.SetSslCertificatesTargetSslProxyRequest], compute.Operation]: + return self._set_ssl_certificates + + @property + def set_ssl_policy( + self, + ) -> Callable[[compute.SetSslPolicyTargetSslProxyRequest], compute.Operation]: + return self._set_ssl_policy + + def close(self): + self._session.close() + __all__ = ("TargetSslProxiesRestTransport",) diff --git a/google/cloud/compute_v1/services/target_tcp_proxies/client.py b/google/cloud/compute_v1/services/target_tcp_proxies/client.py index a0922b36c..d423efdea 100644 --- a/google/cloud/compute_v1/services/target_tcp_proxies/client.py +++ b/google/cloud/compute_v1/services/target_tcp_proxies/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_tcp_proxies import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetTcpProxiesTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,22 +337,23 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteTargetTcpProxyRequest = None, + request: Union[compute.DeleteTargetTcpProxyRequest, dict] = None, *, project: str = None, target_tcp_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified TargetTcpProxy resource. Args: - request (google.cloud.compute_v1.types.DeleteTargetTcpProxyRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetTcpProxyRequest, dict]): The request object. A request message for TargetTcpProxies.Delete. See the method description for details. @@ -417,11 +429,11 @@ def delete( def get( self, - request: compute.GetTargetTcpProxyRequest = None, + request: Union[compute.GetTargetTcpProxyRequest, dict] = None, *, project: str = None, target_tcp_proxy: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetTcpProxy: @@ -430,7 +442,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetTargetTcpProxyRequest): + request (Union[google.cloud.compute_v1.types.GetTargetTcpProxyRequest, dict]): The request object. 
A request message for TargetTcpProxies.Get. See the method description for details. @@ -499,11 +511,11 @@ def get( def insert( self, - request: compute.InsertTargetTcpProxyRequest = None, + request: Union[compute.InsertTargetTcpProxyRequest, dict] = None, *, project: str = None, target_tcp_proxy_resource: compute.TargetTcpProxy = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -511,7 +523,7 @@ def insert( project using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetTcpProxyRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetTcpProxyRequest, dict]): The request object. A request message for TargetTcpProxies.Insert. See the method description for details. @@ -585,10 +597,10 @@ def insert( def list( self, - request: compute.ListTargetTcpProxiesRequest = None, + request: Union[compute.ListTargetTcpProxiesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -596,7 +608,7 @@ def list( available to the specified project. Args: - request (google.cloud.compute_v1.types.ListTargetTcpProxiesRequest): + request (Union[google.cloud.compute_v1.types.ListTargetTcpProxiesRequest, dict]): The request object. A request message for TargetTcpProxies.List. See the method description for details. @@ -659,19 +671,19 @@ def list( def set_backend_service( self, - request: compute.SetBackendServiceTargetTcpProxyRequest = None, + request: Union[compute.SetBackendServiceTargetTcpProxyRequest, dict] = None, *, project: str = None, target_tcp_proxy: str = None, target_tcp_proxies_set_backend_service_request_resource: compute.TargetTcpProxiesSetBackendServiceRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the BackendService for TargetTcpProxy. Args: - request (google.cloud.compute_v1.types.SetBackendServiceTargetTcpProxyRequest): + request (Union[google.cloud.compute_v1.types.SetBackendServiceTargetTcpProxyRequest, dict]): The request object. A request message for TargetTcpProxies.SetBackendService. See the method description for details. @@ -763,19 +775,19 @@ def set_backend_service( def set_proxy_header( self, - request: compute.SetProxyHeaderTargetTcpProxyRequest = None, + request: Union[compute.SetProxyHeaderTargetTcpProxyRequest, dict] = None, *, project: str = None, target_tcp_proxy: str = None, target_tcp_proxies_set_proxy_header_request_resource: compute.TargetTcpProxiesSetProxyHeaderRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Changes the ProxyHeaderType for TargetTcpProxy. Args: - request (google.cloud.compute_v1.types.SetProxyHeaderTargetTcpProxyRequest): + request (Union[google.cloud.compute_v1.types.SetProxyHeaderTargetTcpProxyRequest, dict]): The request object. A request message for TargetTcpProxies.SetProxyHeader. See the method description for details. @@ -864,6 +876,19 @@ def set_proxy_header( # Done; return the response. 
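Editorial note on the client hunks in this file: requests may now be passed as plain dicts (``Union[..., dict]``), and the client gains ``__enter__``/``__exit__`` so it can be used as a context manager that closes its transport on exit. A minimal usage sketch under those assumptions — ``TargetTcpProxiesClient`` exported from ``google.cloud.compute_v1`` and placeholder project/proxy names; this is illustrative and not part of the diff:

from google.cloud import compute_v1

def delete_tcp_proxy(project: str, proxy_name: str) -> None:
    # Exiting the ``with`` block invokes __exit__, which closes the transport;
    # only do this when the transport is not shared with other clients.
    with compute_v1.TargetTcpProxiesClient() as client:
        # With this change the request can be a plain dict instead of a proto message.
        operation = client.delete(
            request={"project": project, "target_tcp_proxy": proxy_name}
        )
        print(operation.status)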
return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_tcp_proxies/pagers.py b/google/cloud/compute_v1/services/target_tcp_proxies/pagers.py index a942527ae..43c1e1898 100644 --- a/google/cloud/compute_v1/services/target_tcp_proxies/pagers.py +++ b/google/cloud/compute_v1/services/target_tcp_proxies/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetTcpProxyList]: + def pages(self) -> Iterator[compute.TargetTcpProxyList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetTcpProxy]: + def __iter__(self) -> Iterator[compute.TargetTcpProxy]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_tcp_proxies/transports/base.py b/google/cloud/compute_v1/services/target_tcp_proxies/transports/base.py index 2ac783c3a..c77606f21 100644 --- a/google/cloud/compute_v1/services/target_tcp_proxies/transports/base.py +++ b/google/cloud/compute_v1/services/target_tcp_proxies/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class TargetTcpProxiesTransport(abc.ABC): """Abstract transport class for TargetTcpProxies.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = 
{"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -178,6 +142,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py b/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py index 1813b0029..d8242da21 100644 --- a/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py +++ b/google/cloud/compute_v1/services/target_tcp_proxies/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + TargetTcpProxiesTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import TargetTcpProxiesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetTcpProxiesRestTransport(TargetTcpProxiesTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteTargetTcpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( TargetTcpProxies.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -135,24 +165,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}".format( - host=self._host, - project=request.project, - target_tcp_proxy=request.target_tcp_proxy, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_tcp_proxy", "targetTcpProxy"), + ] + + request_kwargs = compute.DeleteTargetTcpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetTcpProxyRequest.to_json( + compute.DeleteTargetTcpProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetTcpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -162,10 +221,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetTcpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetTcpProxy: r"""Call the get method over HTTP. @@ -176,6 +237,9 @@ def get( TargetTcpProxies.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -192,22 +256,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}".format( - host=self._host, - project=request.project, - target_tcp_proxy=request.target_tcp_proxy, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_tcp_proxy", "targetTcpProxy"), + ] + + request_kwargs = compute.GetTargetTcpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetTcpProxyRequest.to_json( + compute.GetTargetTcpProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -219,10 +314,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetTcpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -233,6 +330,9 @@ def insert( TargetTcpProxies.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -256,30 +356,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetTcpProxies", + "body": "target_tcp_proxy_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertTargetTcpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetTcpProxy.to_json( - request.target_tcp_proxy_resource, + compute.TargetTcpProxy(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetTcpProxies".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetTcpProxyRequest.to_json( + compute.InsertTargetTcpProxyRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetTcpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -290,10 +419,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetTcpProxiesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetTcpProxyList: r"""Call the list method over HTTP. @@ -304,6 +435,9 @@ def list( TargetTcpProxies.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -314,30 +448,52 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetTcpProxies".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/targetTcpProxies", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListTargetTcpProxiesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetTcpProxiesRequest.to_json( + compute.ListTargetTcpProxiesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetTcpProxiesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetTcpProxiesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetTcpProxiesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetTcpProxiesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetTcpProxiesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -349,10 +505,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_backend_service( + def _set_backend_service( self, request: compute.SetBackendServiceTargetTcpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set backend service method over HTTP. @@ -363,6 +521,9 @@ def set_backend_service( TargetTcpProxies.SetBackendService. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -386,32 +547,64 @@ def set_backend_service( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setBackendService", + "body": "target_tcp_proxies_set_backend_service_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_tcp_proxy", "targetTcpProxy"), + ] + + request_kwargs = compute.SetBackendServiceTargetTcpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetTcpProxiesSetBackendServiceRequest.to_json( - request.target_tcp_proxies_set_backend_service_request_resource, + compute.TargetTcpProxiesSetBackendServiceRequest( + transcoded_request["body"] + ), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setBackendService".format( - host=self._host, - project=request.project, - target_tcp_proxy=request.target_tcp_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetBackendServiceTargetTcpProxyRequest.to_json( + compute.SetBackendServiceTargetTcpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetBackendServiceTargetTcpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -422,10 +615,12 @@ def set_backend_service( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def set_proxy_header( + def _set_proxy_header( self, request: compute.SetProxyHeaderTargetTcpProxyRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set proxy header method over HTTP. @@ -436,6 +631,9 @@ def set_proxy_header( TargetTcpProxies.SetProxyHeader. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -459,32 +657,62 @@ def set_proxy_header( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setProxyHeader", + "body": "target_tcp_proxies_set_proxy_header_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("target_tcp_proxy", "targetTcpProxy"), + ] + + request_kwargs = compute.SetProxyHeaderTargetTcpProxyRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetTcpProxiesSetProxyHeaderRequest.to_json( - request.target_tcp_proxies_set_proxy_header_request_resource, + compute.TargetTcpProxiesSetProxyHeaderRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setProxyHeader".format( - host=self._host, - project=request.project, - target_tcp_proxy=request.target_tcp_proxy, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetProxyHeaderTargetTcpProxyRequest.to_json( + compute.SetProxyHeaderTargetTcpProxyRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetProxyHeaderTargetTcpProxyRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -495,5 +723,44 @@ def set_proxy_header( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[[compute.DeleteTargetTcpProxyRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetTargetTcpProxyRequest], compute.TargetTcpProxy]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertTargetTcpProxyRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListTargetTcpProxiesRequest], compute.TargetTcpProxyList]: + return self._list + + @property + def set_backend_service( + self, + ) -> Callable[[compute.SetBackendServiceTargetTcpProxyRequest], compute.Operation]: + return self._set_backend_service + + @property + def set_proxy_header( + self, + ) -> Callable[[compute.SetProxyHeaderTargetTcpProxyRequest], compute.Operation]: + return self._set_proxy_header + + def close(self): + self._session.close() + __all__ = ("TargetTcpProxiesRestTransport",) diff --git a/google/cloud/compute_v1/services/target_vpn_gateways/client.py b/google/cloud/compute_v1/services/target_vpn_gateways/client.py index 56408cf76..9cec23793 100644 --- a/google/cloud/compute_v1/services/target_vpn_gateways/client.py +++ b/google/cloud/compute_v1/services/target_vpn_gateways/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
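One more editorial sketch covering the tail of the TargetTcpProxies REST transport above: each verb implementation is renamed to a private `_get`/`_insert`/... method, and the public names become read-only properties that return those callables, so existing `transport.get(request)` call sites keep working. A toy illustration of the pattern (the class and names below are hypothetical):

```python
# Minimal sketch of the property-exposed-callable pattern used by the REST transports.
from typing import Callable


class ToyRestTransport:
    def _get(self, request: dict) -> dict:
        # Stand-in for the real HTTP round trip.
        return {"echo": request}

    @property
    def get(self) -> Callable[[dict], dict]:
        return self._get


transport = ToyRestTransport()
response = transport.get({"project": "my-project"})  # same call shape as before the rename
```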
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.target_vpn_gateways import pagers from google.cloud.compute_v1.types import compute from .transports.base import TargetVpnGatewaysTransport, DEFAULT_CLIENT_INFO @@ -265,8 +269,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -328,21 +339,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListTargetVpnGatewaysRequest = None, + request: Union[compute.AggregatedListTargetVpnGatewaysRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of target VPN gateways. Args: - request (google.cloud.compute_v1.types.AggregatedListTargetVpnGatewaysRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListTargetVpnGatewaysRequest, dict]): The request object. A request message for TargetVpnGateways.AggregatedList. See the method description for details. @@ -403,19 +415,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteTargetVpnGatewayRequest = None, + request: Union[compute.DeleteTargetVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, target_vpn_gateway: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified target VPN gateway. 
Args: - request (google.cloud.compute_v1.types.DeleteTargetVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.DeleteTargetVpnGatewayRequest, dict]): The request object. A request message for TargetVpnGateways.Delete. See the method description for details. @@ -498,12 +510,12 @@ def delete( def get( self, - request: compute.GetTargetVpnGatewayRequest = None, + request: Union[compute.GetTargetVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, target_vpn_gateway: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetVpnGateway: @@ -512,7 +524,7 @@ def get( request. Args: - request (google.cloud.compute_v1.types.GetTargetVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.GetTargetVpnGatewayRequest, dict]): The request object. A request message for TargetVpnGateways.Get. See the method description for details. @@ -585,12 +597,12 @@ def get( def insert( self, - request: compute.InsertTargetVpnGatewayRequest = None, + request: Union[compute.InsertTargetVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, target_vpn_gateway_resource: compute.TargetVpnGateway = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -598,7 +610,7 @@ def insert( and region using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertTargetVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.InsertTargetVpnGatewayRequest, dict]): The request object. A request message for TargetVpnGateways.Insert. See the method description for details. @@ -679,11 +691,11 @@ def insert( def list( self, - request: compute.ListTargetVpnGatewaysRequest = None, + request: Union[compute.ListTargetVpnGatewaysRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -691,7 +703,7 @@ def list( the specified project and region. Args: - request (google.cloud.compute_v1.types.ListTargetVpnGatewaysRequest): + request (Union[google.cloud.compute_v1.types.ListTargetVpnGatewaysRequest, dict]): The request object. A request message for TargetVpnGateways.List. See the method description for details. @@ -759,6 +771,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/target_vpn_gateways/pagers.py b/google/cloud/compute_v1/services/target_vpn_gateways/pagers.py index a5dcca01e..b85fc19c4 100644 --- a/google/cloud/compute_v1/services/target_vpn_gateways/pagers.py +++ b/google/cloud/compute_v1/services/target_vpn_gateways/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetVpnGatewayAggregatedList]: + def pages(self) -> Iterator[compute.TargetVpnGatewayAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.TargetVpnGatewaysScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.TargetVpnGatewaysScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.TargetVpnGatewayList]: + def pages(self) -> Iterator[compute.TargetVpnGatewayList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.TargetVpnGateway]: + def __iter__(self) -> Iterator[compute.TargetVpnGateway]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/target_vpn_gateways/transports/base.py b/google/cloud/compute_v1/services/target_vpn_gateways/transports/base.py index 7b33ee556..0800d5d84 100644 --- a/google/cloud/compute_v1/services/target_vpn_gateways/transports/base.py +++ b/google/cloud/compute_v1/services/target_vpn_gateways/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = 
pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class TargetVpnGatewaysTransport(abc.ABC): """Abstract transport class for TargetVpnGateways.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py b/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py index 489124f8d..d1dab1a38 100644 --- a/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py +++ b/google/cloud/compute_v1/services/target_vpn_gateways/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + TargetVpnGatewaysTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import TargetVpnGatewaysTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class TargetVpnGatewaysRestTransport(TargetVpnGatewaysTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
@@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListTargetVpnGatewaysRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetVpnGatewayAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +139,9 @@ def aggregated_list( TargetVpnGateways.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -120,35 +150,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/targetVpnGateways".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/targetVpnGateways", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListTargetVpnGatewaysRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListTargetVpnGatewaysRequest.to_json( + compute.AggregatedListTargetVpnGatewaysRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListTargetVpnGatewaysRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListTargetVpnGatewaysRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListTargetVpnGatewaysRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListTargetVpnGatewaysRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListTargetVpnGatewaysRequest.page_token in request: - query_params["pageToken"] = request.page_token - if ( - compute.AggregatedListTargetVpnGatewaysRequest.return_partial_success - in request - ): - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -160,10 +209,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteTargetVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -174,6 +225,9 @@ def delete( TargetVpnGateways.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -197,25 +251,56 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}".format( - host=self._host, - project=request.project, - region=request.region, - target_vpn_gateway=request.target_vpn_gateway, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_vpn_gateway", "targetVpnGateway"), + ] + + request_kwargs = compute.DeleteTargetVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteTargetVpnGatewayRequest.to_json( + compute.DeleteTargetVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteTargetVpnGatewayRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -225,10 +310,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetTargetVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetVpnGateway: r"""Call the get method over HTTP. @@ -239,6 +326,9 @@ def get( TargetVpnGateways.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -252,23 +342,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}".format( - host=self._host, - project=request.project, - region=request.region, - target_vpn_gateway=request.target_vpn_gateway, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("target_vpn_gateway", "targetVpnGateway"), + ] + + request_kwargs = compute.GetTargetVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetTargetVpnGatewayRequest.to_json( + compute.GetTargetVpnGatewayRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -280,10 +401,12 @@ def get( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertTargetVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -294,6 +417,9 @@ def insert( TargetVpnGateways.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -317,30 +443,62 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetVpnGateways", + "body": "target_vpn_gateway_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertTargetVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TargetVpnGateway.to_json( - request.target_vpn_gateway_resource, + compute.TargetVpnGateway(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetVpnGateways".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertTargetVpnGatewayRequest.to_json( + compute.InsertTargetVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertTargetVpnGatewayRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -351,10 +509,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListTargetVpnGatewaysRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TargetVpnGatewayList: r"""Call the list method over HTTP. @@ -365,6 +525,9 @@ def list( TargetVpnGateways.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -375,30 +538,55 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/targetVpnGateways".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/targetVpnGateways", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListTargetVpnGatewaysRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListTargetVpnGatewaysRequest.to_json( + compute.ListTargetVpnGatewaysRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListTargetVpnGatewaysRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListTargetVpnGatewaysRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListTargetVpnGatewaysRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListTargetVpnGatewaysRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListTargetVpnGatewaysRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -410,5 +598,41 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListTargetVpnGatewaysRequest], + compute.TargetVpnGatewayAggregatedList, + ]: + return self._aggregated_list + + @property + def delete( + self, + ) -> Callable[[compute.DeleteTargetVpnGatewayRequest], compute.Operation]: + return self._delete + + @property + def get( + self, + ) -> Callable[[compute.GetTargetVpnGatewayRequest], compute.TargetVpnGateway]: + return self._get + + @property + def insert( + self, + ) -> Callable[[compute.InsertTargetVpnGatewayRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListTargetVpnGatewaysRequest], compute.TargetVpnGatewayList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("TargetVpnGatewaysRestTransport",) diff --git a/google/cloud/compute_v1/services/url_maps/client.py b/google/cloud/compute_v1/services/url_maps/client.py index 549efb047..91bc39013 100644 --- a/google/cloud/compute_v1/services/url_maps/client.py +++ b/google/cloud/compute_v1/services/url_maps/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
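Editorial sketch of the mutual-TLS toggle change that appears in each client `__init__` in this diff: `distutils.util.strtobool` is removed, and the environment variable is validated against the exact strings `"true"` and `"false"` before being compared. A standalone version of that check:

```python
import os

# Mirror of the validation added in the client constructors above.
use_client_cert_env = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
if use_client_cert_env not in ("true", "false"):
    raise ValueError(
        "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
    )
use_client_cert = use_client_cert_env == "true"
```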
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.url_maps import pagers from google.cloud.compute_v1.types import compute from .transports.base import UrlMapsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,14 +335,15 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListUrlMapsRequest = None, + request: Union[compute.AggregatedListUrlMapsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: @@ -339,7 +351,7 @@ def aggregated_list( and global, available to the specified project. Args: - request (google.cloud.compute_v1.types.AggregatedListUrlMapsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListUrlMapsRequest, dict]): The request object. A request message for UrlMaps.AggregatedList. See the method description for details. @@ -402,18 +414,18 @@ def aggregated_list( def delete( self, - request: compute.DeleteUrlMapRequest = None, + request: Union[compute.DeleteUrlMapRequest, dict] = None, *, project: str = None, url_map: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified UrlMap resource. Args: - request (google.cloud.compute_v1.types.DeleteUrlMapRequest): + request (Union[google.cloud.compute_v1.types.DeleteUrlMapRequest, dict]): The request object. 
A request message for UrlMaps.Delete. See the method description for details. project (str): @@ -488,11 +500,11 @@ def delete( def get( self, - request: compute.GetUrlMapRequest = None, + request: Union[compute.GetUrlMapRequest, dict] = None, *, project: str = None, url_map: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMap: @@ -500,7 +512,7 @@ def get( available URL maps by making a list() request. Args: - request (google.cloud.compute_v1.types.GetUrlMapRequest): + request (Union[google.cloud.compute_v1.types.GetUrlMapRequest, dict]): The request object. A request message for UrlMaps.Get. See the method description for details. project (str): @@ -581,11 +593,11 @@ def get( def insert( self, - request: compute.InsertUrlMapRequest = None, + request: Union[compute.InsertUrlMapRequest, dict] = None, *, project: str = None, url_map_resource: compute.UrlMap = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -593,7 +605,7 @@ def insert( using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertUrlMapRequest): + request (Union[google.cloud.compute_v1.types.InsertUrlMapRequest, dict]): The request object. A request message for UrlMaps.Insert. See the method description for details. project (str): @@ -666,12 +678,12 @@ def insert( def invalidate_cache( self, - request: compute.InvalidateCacheUrlMapRequest = None, + request: Union[compute.InvalidateCacheUrlMapRequest, dict] = None, *, project: str = None, url_map: str = None, cache_invalidation_rule_resource: compute.CacheInvalidationRule = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -681,7 +693,7 @@ def invalidate_cache( content `__. Args: - request (google.cloud.compute_v1.types.InvalidateCacheUrlMapRequest): + request (Union[google.cloud.compute_v1.types.InvalidateCacheUrlMapRequest, dict]): The request object. A request message for UrlMaps.InvalidateCache. See the method description for details. @@ -766,10 +778,10 @@ def invalidate_cache( def list( self, - request: compute.ListUrlMapsRequest = None, + request: Union[compute.ListUrlMapsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -777,7 +789,7 @@ def list( the specified project. Args: - request (google.cloud.compute_v1.types.ListUrlMapsRequest): + request (Union[google.cloud.compute_v1.types.ListUrlMapsRequest, dict]): The request object. A request message for UrlMaps.List. See the method description for details. project (str): @@ -838,12 +850,12 @@ def list( def patch( self, - request: compute.PatchUrlMapRequest = None, + request: Union[compute.PatchUrlMapRequest, dict] = None, *, project: str = None, url_map: str = None, url_map_resource: compute.UrlMap = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -853,7 +865,7 @@ def patch( processing rules. 
Args: - request (google.cloud.compute_v1.types.PatchUrlMapRequest): + request (Union[google.cloud.compute_v1.types.PatchUrlMapRequest, dict]): The request object. A request message for UrlMaps.Patch. See the method description for details. project (str): @@ -933,12 +945,12 @@ def patch( def update( self, - request: compute.UpdateUrlMapRequest = None, + request: Union[compute.UpdateUrlMapRequest, dict] = None, *, project: str = None, url_map: str = None, url_map_resource: compute.UrlMap = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -946,7 +958,7 @@ def update( included in the request. Args: - request (google.cloud.compute_v1.types.UpdateUrlMapRequest): + request (Union[google.cloud.compute_v1.types.UpdateUrlMapRequest, dict]): The request object. A request message for UrlMaps.Update. See the method description for details. project (str): @@ -1028,12 +1040,12 @@ def update( def validate( self, - request: compute.ValidateUrlMapRequest = None, + request: Union[compute.ValidateUrlMapRequest, dict] = None, *, project: str = None, url_map: str = None, url_maps_validate_request_resource: compute.UrlMapsValidateRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMapsValidateResponse: @@ -1042,7 +1054,7 @@ def validate( this method does NOT create the UrlMap. Args: - request (google.cloud.compute_v1.types.ValidateUrlMapRequest): + request (Union[google.cloud.compute_v1.types.ValidateUrlMapRequest, dict]): The request object. A request message for UrlMaps.Validate. See the method description for details. @@ -1112,6 +1124,19 @@ def validate( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/url_maps/pagers.py b/google/cloud/compute_v1/services/url_maps/pagers.py index 1a5d42fc4..0d33fed26 100644 --- a/google/cloud/compute_v1/services/url_maps/pagers.py +++ b/google/cloud/compute_v1/services/url_maps/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.UrlMapsAggregatedList]: + def pages(self) -> Iterator[compute.UrlMapsAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.UrlMapsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.UrlMapsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.UrlMapList]: + def pages(self) -> Iterator[compute.UrlMapList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.UrlMap]: + def __iter__(self) -> Iterator[compute.UrlMap]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/url_maps/transports/base.py b/google/cloud/compute_v1/services/url_maps/transports/base.py index c7d9f1373..0b5c9b480 100644 --- a/google/cloud/compute_v1/services/url_maps/transports/base.py +++ b/google/cloud/compute_v1/services/url_maps/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class UrlMapsTransport(abc.ABC): """Abstract 
transport class for UrlMaps.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -187,6 +151,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/url_maps/transports/rest.py b/google/cloud/compute_v1/services/url_maps/transports/rest.py index 9e39d99c8..760dfb78a 100644 --- a/google/cloud/compute_v1/services/url_maps/transports/rest.py +++ b/google/cloud/compute_v1/services/url_maps/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import UrlMapsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import UrlMapsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class UrlMapsRestTransport(UrlMapsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListUrlMapsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMapsAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( UrlMaps.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
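The OptionalRetry alias added near the top of these modules is a small version-compatibility shim: on google-api-core releases that expose gapic_v1.method._MethodDefault the annotation can distinguish an explicit Retry from the per-method default sentinel, while older releases fall back to a plain object. A minimal sketch of how the alias is consumed (the list_url_maps helper is hypothetical and only stands in for any generated method):

    from typing import Union

    from google.api_core import gapic_v1
    from google.api_core import retry as retries

    try:
        # Newer api-core: the DEFAULT sentinel has its own type.
        OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
    except AttributeError:  # pragma: NO COVER
        # Older api-core: fall back to a generic object sentinel.
        OptionalRetry = Union[retries.Retry, object]  # type: ignore

    def list_url_maps(retry: OptionalRetry = gapic_v1.method.DEFAULT) -> None:
        # gapic_v1.method.DEFAULT means "use the retry configured for this method".
        ...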
@@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/urlMaps".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/urlMaps", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListUrlMapsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListUrlMapsRequest.to_json( + compute.AggregatedListUrlMapsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListUrlMapsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListUrlMapsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListUrlMapsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListUrlMapsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListUrlMapsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListUrlMapsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -170,6 +221,9 @@ def delete( The request object. A request message for UrlMaps.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
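Each rewritten REST method now delegates URL construction to google.api_core.path_template.transcode instead of hand-formatting the URI. A rough sketch of that step, with a hypothetical request dict (the real code builds it via the request message's to_dict):

    from google.api_core import path_template

    http_options = [
        {"method": "get", "uri": "/compute/v1/projects/{project}/aggregated/urlMaps"},
    ]

    # Hypothetical request fields; "my-project" is a placeholder.
    request_kwargs = {"project": "my-project", "max_results": 10}

    transcoded = path_template.transcode(http_options, **request_kwargs)
    # transcoded["method"]       -> "get"
    # transcoded["uri"]          -> "/compute/v1/projects/my-project/aggregated/urlMaps"
    # transcoded["query_params"] -> fields not bound to the path, e.g. {"max_results": 10}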
@@ -193,22 +247,53 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps/{url_map}".format( - host=self._host, project=request.project, url_map=request.url_map, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/global/urlMaps/{url_map}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.DeleteUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteUrlMapRequest.to_json( + compute.DeleteUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -218,10 +303,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMap: r"""Call the get method over HTTP. @@ -231,6 +318,9 @@ def get( The request object. A request message for UrlMaps.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
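The required-fields loop repeated in every method guards against a subtle loss: serializing the query params with including_default_value_fields=False drops required fields whose value happens to be the proto default, even though transcode() saw them in the original request. A distilled version of that loop with illustrative field names (not the real required_fields list):

    required_fields = [
        # (snake_case_name, camel_case_name) -- illustrative, not the generated list
        ("request_id", "requestId"),
    ]

    orig_query_params = {"request_id": ""}   # transcode() kept the proto default
    query_params = {}                        # the to_json round-trip dropped it

    for snake_case_name, camel_case_name in required_fields:
        if snake_case_name in orig_query_params and camel_case_name not in query_params:
            query_params[camel_case_name] = orig_query_params[snake_case_name]

    # query_params is now {"requestId": ""}, so the required field is still sent.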
@@ -260,20 +350,53 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps/{url_map}".format( - host=self._host, project=request.project, url_map=request.url_map, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/urlMaps/{url_map}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.GetUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetUrlMapRequest.to_json( + compute.GetUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -283,10 +406,12 @@ def get( # Return the response return compute.UrlMap.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -296,6 +421,9 @@ def insert( The request object. A request message for UrlMaps.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
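Sending has the same shape for every verb: the HTTP method string produced by transcode() is looked up on the AuthorizedSession, and the query dict is flattened into repeated key/value pairs. A condensed sketch (send_request is a hypothetical helper, not part of the generated transport):

    from google.api_core import rest_helpers

    def send_request(session, host, transcoded, query_params, headers,
                     timeout=None, body=None):
        # getattr(session, "get"/"delete"/"post"/...) resolves to the matching
        # requests verb, so one code path serves every method in http_options.
        call = getattr(session, transcoded["method"])
        kwargs = dict(
            timeout=timeout,
            headers=headers,
            # flatten_query_params expands nested/list values into the repeated
            # key=value pairs expected by the Compute API.
            params=rest_helpers.flatten_query_params(query_params),
        )
        if body is not None:
            kwargs["data"] = body
        return call("https://{host}{uri}".format(host=host, uri=transcoded["uri"]), **kwargs)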
@@ -319,30 +447,59 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/urlMaps", + "body": "url_map_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.InsertUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMap.to_json( - request.url_map_resource, + compute.UrlMap(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps".format( - host=self._host, project=request.project, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertUrlMapRequest.to_json( + compute.InsertUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -353,10 +510,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def invalidate_cache( + def _invalidate_cache( self, request: compute.InvalidateCacheUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the invalidate cache method over HTTP. @@ -367,6 +526,9 @@ def invalidate_cache( UrlMaps.InvalidateCache. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
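For the verbs that carry a body (insert, patch, update, invalidate_cache, validate), the "body" sub-dict returned by transcode() is rebuilt into its proto-plus message and serialized with the same options as the query params, so unset fields stay out of the payload. A condensed sketch reusing the compute.UrlMap message (the payload is hypothetical):

    from google.cloud.compute_v1.types import compute

    transcoded_body = {"name": "my-url-map"}  # placeholder payload from transcode()

    # to_json() on a proto-plus message returns a JSON string; defaults are
    # omitted, so only fields the caller actually set are transmitted.
    body = compute.UrlMap.to_json(
        compute.UrlMap(transcoded_body),
        including_default_value_fields=False,
        use_integers_for_enums=False,
    )
    # json.loads(body) -> {"name": "my-url-map"}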
@@ -390,30 +552,62 @@ def invalidate_cache( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/urlMaps/{url_map}/invalidateCache", + "body": "cache_invalidation_rule_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.InvalidateCacheUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.CacheInvalidationRule.to_json( - request.cache_invalidation_rule_resource, + compute.CacheInvalidationRule(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps/{url_map}/invalidateCache".format( - host=self._host, project=request.project, url_map=request.url_map, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InvalidateCacheUrlMapRequest.to_json( + compute.InvalidateCacheUrlMapRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InvalidateCacheUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -424,10 +618,12 @@ def invalidate_cache( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListUrlMapsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMapList: r"""Call the list method over HTTP. @@ -437,6 +633,9 @@ def list( The request object. A request message for UrlMaps.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -445,30 +644,49 @@ def list( Contains a list of UrlMap resources. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/global/urlMaps",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListUrlMapsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListUrlMapsRequest.to_json( + compute.ListUrlMapsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListUrlMapsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListUrlMapsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListUrlMapsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListUrlMapsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListUrlMapsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -480,10 +698,12 @@ def list( response.content, ignore_unknown_fields=True ) - def patch( + def _patch( self, request: compute.PatchUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the patch method over HTTP. @@ -493,6 +713,9 @@ def patch( The request object. A request message for UrlMaps.Patch. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -516,30 +739,60 @@ def patch( """ + http_options = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/global/urlMaps/{url_map}", + "body": "url_map_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.PatchUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMap.to_json( - request.url_map_resource, + compute.UrlMap(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps/{url_map}".format( - host=self._host, project=request.project, url_map=request.url_map, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.PatchUrlMapRequest.to_json( + compute.PatchUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.PatchUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.patch( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -550,10 +803,12 @@ def patch( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def update( + def _update( self, request: compute.UpdateUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the update method over HTTP. @@ -563,6 +818,9 @@ def update( The request object. A request message for UrlMaps.Update. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -586,30 +844,60 @@ def update( """ + http_options = [ + { + "method": "put", + "uri": "/compute/v1/projects/{project}/global/urlMaps/{url_map}", + "body": "url_map_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.UpdateUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMap.to_json( - request.url_map_resource, + compute.UrlMap(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps/{url_map}".format( - host=self._host, project=request.project, url_map=request.url_map, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.UpdateUrlMapRequest.to_json( + compute.UpdateUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.UpdateUrlMapRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.put( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -620,10 +908,12 @@ def update( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def validate( + def _validate( self, request: compute.ValidateUrlMapRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.UrlMapsValidateResponse: r"""Call the validate method over HTTP. @@ -634,6 +924,9 @@ def validate( UrlMaps.Validate. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -642,28 +935,60 @@ def validate( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/urlMaps/{url_map}/validate", + "body": "url_maps_validate_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("url_map", "urlMap"), + ] + + request_kwargs = compute.ValidateUrlMapRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.UrlMapsValidateRequest.to_json( - request.url_maps_validate_request_resource, + compute.UrlMapsValidateRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/global/urlMaps/{url_map}/validate".format( - host=self._host, project=request.project, url_map=request.url_map, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ValidateUrlMapRequest.to_json( + compute.ValidateUrlMapRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -676,5 +1001,52 @@ def validate( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListUrlMapsRequest], compute.UrlMapsAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteUrlMapRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetUrlMapRequest], compute.UrlMap]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertUrlMapRequest], compute.Operation]: + return self._insert + + @property + def invalidate_cache( + self, + ) -> Callable[[compute.InvalidateCacheUrlMapRequest], compute.Operation]: + return self._invalidate_cache + + @property + def list(self) -> Callable[[compute.ListUrlMapsRequest], compute.UrlMapList]: + return self._list + + @property + def patch(self) -> Callable[[compute.PatchUrlMapRequest], compute.Operation]: + return self._patch + + @property + def update(self) -> Callable[[compute.UpdateUrlMapRequest], compute.Operation]: + return self._update + + 
@property + def validate( + self, + ) -> Callable[[compute.ValidateUrlMapRequest], compute.UrlMapsValidateResponse]: + return self._validate + + def close(self): + self._session.close() + __all__ = ("UrlMapsRestTransport",) diff --git a/google/cloud/compute_v1/services/vpn_gateways/client.py b/google/cloud/compute_v1/services/vpn_gateways/client.py index 8993c987b..3d5d0aa19 100644 --- a/google/cloud/compute_v1/services/vpn_gateways/client.py +++ b/google/cloud/compute_v1/services/vpn_gateways/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.vpn_gateways import pagers from google.cloud.compute_v1.types import compute from .transports.base import VpnGatewaysTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListVpnGatewaysRequest = None, + request: Union[compute.AggregatedListVpnGatewaysRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of VPN gateways. Args: - request (google.cloud.compute_v1.types.AggregatedListVpnGatewaysRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListVpnGatewaysRequest, dict]): The request object. A request message for VpnGateways.AggregatedList. See the method description for details. 
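The mTLS toggle no longer goes through distutils.util.strtobool: only the literal strings "true" and "false" are accepted for GOOGLE_API_USE_CLIENT_CERTIFICATE, and anything else raises immediately instead of being coerced. The equivalent logic as a standalone helper (the helper name is illustrative):

    import os

    def _use_client_cert() -> bool:
        value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
        if value not in ("true", "false"):
            raise ValueError(
                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` "
                "must be either `true` or `false`"
            )
        return value == "true"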
@@ -399,19 +411,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteVpnGatewayRequest = None, + request: Union[compute.DeleteVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, vpn_gateway: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified VPN gateway. Args: - request (google.cloud.compute_v1.types.DeleteVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.DeleteVpnGatewayRequest, dict]): The request object. A request message for VpnGateways.Delete. See the method description for details. @@ -492,12 +504,12 @@ def delete( def get( self, - request: compute.GetVpnGatewayRequest = None, + request: Union[compute.GetVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, vpn_gateway: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnGateway: @@ -505,7 +517,7 @@ def get( available VPN gateways by making a list() request. Args: - request (google.cloud.compute_v1.types.GetVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.GetVpnGatewayRequest, dict]): The request object. A request message for VpnGateways.Get. See the method description for details. project (str): @@ -579,19 +591,19 @@ def get( def get_status( self, - request: compute.GetStatusVpnGatewayRequest = None, + request: Union[compute.GetStatusVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, vpn_gateway: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnGatewaysGetStatusResponse: r"""Returns the status for the specified VPN gateway. Args: - request (google.cloud.compute_v1.types.GetStatusVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.GetStatusVpnGatewayRequest, dict]): The request object. A request message for VpnGateways.GetStatus. See the method description for details. @@ -657,12 +669,12 @@ def get_status( def insert( self, - request: compute.InsertVpnGatewayRequest = None, + request: Union[compute.InsertVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, vpn_gateway_resource: compute.VpnGateway = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -670,7 +682,7 @@ def insert( region using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.InsertVpnGatewayRequest, dict]): The request object. A request message for VpnGateways.Insert. See the method description for details. @@ -751,11 +763,11 @@ def insert( def list( self, - request: compute.ListVpnGatewaysRequest = None, + request: Union[compute.ListVpnGatewaysRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -763,7 +775,7 @@ def list( specified project and region. 
Args: - request (google.cloud.compute_v1.types.ListVpnGatewaysRequest): + request (Union[google.cloud.compute_v1.types.ListVpnGatewaysRequest, dict]): The request object. A request message for VpnGateways.List. See the method description for details. @@ -833,13 +845,13 @@ def list( def set_labels( self, - request: compute.SetLabelsVpnGatewayRequest = None, + request: Union[compute.SetLabelsVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, region_set_labels_request_resource: compute.RegionSetLabelsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -847,7 +859,7 @@ def set_labels( labels, read the Labeling Resources documentation. Args: - request (google.cloud.compute_v1.types.SetLabelsVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.SetLabelsVpnGatewayRequest, dict]): The request object. A request message for VpnGateways.SetLabels. See the method description for details. @@ -941,13 +953,13 @@ def set_labels( def test_iam_permissions( self, - request: compute.TestIamPermissionsVpnGatewayRequest = None, + request: Union[compute.TestIamPermissionsVpnGatewayRequest, dict] = None, *, project: str = None, region: str = None, resource: str = None, test_permissions_request_resource: compute.TestPermissionsRequest = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: @@ -955,7 +967,7 @@ def test_iam_permissions( specified resource. Args: - request (google.cloud.compute_v1.types.TestIamPermissionsVpnGatewayRequest): + request (Union[google.cloud.compute_v1.types.TestIamPermissionsVpnGatewayRequest, dict]): The request object. A request message for VpnGateways.TestIamPermissions. See the method description for details. @@ -1034,6 +1046,19 @@ def test_iam_permissions( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/vpn_gateways/pagers.py b/google/cloud/compute_v1/services/vpn_gateways/pagers.py index e8f6cc966..774205758 100644 --- a/google/cloud/compute_v1/services/vpn_gateways/pagers.py +++ b/google/cloud/compute_v1/services/vpn_gateways/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.VpnGatewayAggregatedList]: + def pages(self) -> Iterator[compute.VpnGatewayAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.VpnGatewaysScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.VpnGatewaysScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.VpnGatewayList]: + def pages(self) -> Iterator[compute.VpnGatewayList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.VpnGateway]: + def __iter__(self) -> Iterator[compute.VpnGateway]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/vpn_gateways/transports/base.py b/google/cloud/compute_v1/services/vpn_gateways/transports/base.py index 5e4c8d357..a952c85d5 100644 --- a/google/cloud/compute_v1/services/vpn_gateways/transports/base.py +++ b/google/cloud/compute_v1/services/vpn_gateways/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION 
= None - class VpnGatewaysTransport(abc.ABC): """Abstract transport class for VpnGateways.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -186,6 +150,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py b/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py index 725709ac6..9065727e4 100644 --- a/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py +++ b/google/cloud/compute_v1/services/vpn_gateways/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import VpnGatewaysTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import VpnGatewaysTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class VpnGatewaysRestTransport(VpnGatewaysTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListVpnGatewaysRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnGatewayAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( VpnGateways.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
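With requests_version removed from transports/base.py, the REST transport now derives its client info from the base value and layers the HTTP details on top, while the gRPC-free base only reports the gapic version. In effect (the version string below is a placeholder for BASE_DEFAULT_CLIENT_INFO.gapic_version):

    from google.api_core import gapic_v1
    from requests import __version__ as requests_version

    BASE_GAPIC_VERSION = "0.0.0"  # placeholder; normally read from the base module

    # grpc_version stays None because this transport never speaks gRPC;
    # rest_version records the requests library actually used on the wire.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=BASE_GAPIC_VERSION,
        grpc_version=None,
        rest_version=requests_version,
    )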
@@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/vpnGateways".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/vpnGateways", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListVpnGatewaysRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListVpnGatewaysRequest.to_json( + compute.AggregatedListVpnGatewaysRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListVpnGatewaysRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListVpnGatewaysRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListVpnGatewaysRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListVpnGatewaysRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListVpnGatewaysRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListVpnGatewaysRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -171,6 +222,9 @@ def delete( VpnGateways.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -194,25 +248,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}".format( - host=self._host, - project=request.project, - region=request.region, - vpn_gateway=request.vpn_gateway, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("vpn_gateway", "vpnGateway"), + ] + + request_kwargs = compute.DeleteVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteVpnGatewayRequest.to_json( + compute.DeleteVpnGatewayRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteVpnGatewayRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +305,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnGateway: r"""Call the get method over HTTP. @@ -236,6 +321,9 @@ def get( VpnGateways.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -253,23 +341,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}".format( - host=self._host, - project=request.project, - region=request.region, - vpn_gateway=request.vpn_gateway, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("vpn_gateway", "vpnGateway"), + ] + + request_kwargs = compute.GetVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetVpnGatewayRequest.to_json( + compute.GetVpnGatewayRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -281,10 +400,12 @@ def get( response.content, ignore_unknown_fields=True ) - def get_status( + def _get_status( self, request: compute.GetStatusVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnGatewaysGetStatusResponse: r"""Call the get status method over HTTP. @@ -295,6 +416,9 @@ def get_status( VpnGateways.GetStatus. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -303,23 +427,54 @@ def get_status( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}/getStatus".format( - host=self._host, - project=request.project, - region=request.region, - vpn_gateway=request.vpn_gateway, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}/getStatus", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("vpn_gateway", "vpnGateway"), + ] + + request_kwargs = compute.GetStatusVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetStatusVpnGatewayRequest.to_json( + compute.GetStatusVpnGatewayRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -331,10 +486,12 @@ def get_status( response.content, ignore_unknown_fields=True ) - def insert( + def _insert( self, request: compute.InsertVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -345,6 +502,9 @@ def insert( VpnGateways.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -368,30 +528,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways", + "body": "vpn_gateway_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.VpnGateway.to_json( - request.vpn_gateway_resource, + compute.VpnGateway(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnGateways".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertVpnGatewayRequest.to_json( + compute.InsertVpnGatewayRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertVpnGatewayRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -402,10 +592,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListVpnGatewaysRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnGatewayList: r"""Call the list method over HTTP. @@ -416,6 +608,9 @@ def list( VpnGateways.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
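For the methods that carry a request body, such as `_insert` above, the `"body"` entry in `http_options` tells `transcode` which request field to split off; everything else not bound to the URI stays behind as a query parameter. A sketch with hypothetical values (the real code then re-serializes both halves through the proto-plus `to_json` helpers, which is what produces the camelCase wire names):

from google.api_core import path_template

http_options = [
    {
        "method": "post",
        "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways",
        "body": "vpn_gateway_resource",
    },
]

# Hypothetical insert request as a plain dict.
request_kwargs = {
    "project": "my-project",
    "region": "us-central1",
    "request_id": "abc-123",
    "vpn_gateway_resource": {"name": "gw-1", "network": "default"},
}

transcoded = path_template.transcode(http_options, **request_kwargs)
# transcoded["method"]       -> "post"
# transcoded["uri"]          -> "/compute/v1/projects/my-project/regions/us-central1/vpnGateways"
# transcoded["body"]         -> {"name": "gw-1", "network": "default"}
# transcoded["query_params"] -> {"request_id": "abc-123"}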
@@ -426,30 +621,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnGateways".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListVpnGatewaysRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListVpnGatewaysRequest.to_json( + compute.ListVpnGatewaysRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListVpnGatewaysRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListVpnGatewaysRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListVpnGatewaysRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListVpnGatewaysRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListVpnGatewaysRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -461,10 +679,12 @@ def list( response.content, ignore_unknown_fields=True ) - def set_labels( + def _set_labels( self, request: compute.SetLabelsVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the set labels method over HTTP. @@ -475,6 +695,9 @@ def set_labels( VpnGateways.SetLabels. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -498,33 +721,61 @@ def set_labels( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/setLabels", + "body": "region_set_labels_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.SetLabelsVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.RegionSetLabelsRequest.to_json( - request.region_set_labels_request_resource, + compute.RegionSetLabelsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/setLabels".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.SetLabelsVpnGatewayRequest.to_json( + compute.SetLabelsVpnGatewayRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.SetLabelsVpnGatewayRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -535,10 +786,12 @@ def set_labels( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def test_iam_permissions( + def _test_iam_permissions( self, request: compute.TestIamPermissionsVpnGatewayRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.TestPermissionsResponse: r"""Call the test iam permissions method over HTTP. @@ -549,6 +802,9 @@ def test_iam_permissions( VpnGateways.TestIamPermissions. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
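The `required_fields` backfill loop that each method gains exists because serializing the query params with `including_default_value_fields=False` can drop a field whose value happens to equal the proto default, even though the API still expects it. A plain-dict sketch of what the loop does; whether a particular compute field is actually dropped depends on its presence semantics, so the values here are only illustrative:

required_fields = [
    ("project", "project"),
    ("region", "region"),
]

orig_query_params = {"project": "my-project", "region": ""}  # before serialization (snake_case)
query_params = {"project": "my-project"}                     # after serialization; "" was dropped

# Copy any required field that survived in the original dict back in under its camelCase name.
for snake_case_name, camel_case_name in required_fields:
    if snake_case_name in orig_query_params and camel_case_name not in query_params:
        query_params[camel_case_name] = orig_query_params[snake_case_name]

# query_params -> {"project": "my-project", "region": ""}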
@@ -557,31 +813,63 @@ def test_iam_permissions( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("resource", "resource"), + ] + + request_kwargs = compute.TestIamPermissionsVpnGatewayRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.TestPermissionsRequest.to_json( - request.test_permissions_request_resource, + compute.TestPermissionsRequest(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/testIamPermissions".format( - host=self._host, - project=request.project, - region=request.region, - resource=request.resource, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.TestIamPermissionsVpnGatewayRequest.to_json( + compute.TestIamPermissionsVpnGatewayRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. 
+ orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -594,5 +882,56 @@ def test_iam_permissions( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListVpnGatewaysRequest], compute.VpnGatewayAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteVpnGatewayRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetVpnGatewayRequest], compute.VpnGateway]: + return self._get + + @property + def get_status( + self, + ) -> Callable[ + [compute.GetStatusVpnGatewayRequest], compute.VpnGatewaysGetStatusResponse + ]: + return self._get_status + + @property + def insert(self) -> Callable[[compute.InsertVpnGatewayRequest], compute.Operation]: + return self._insert + + @property + def list( + self, + ) -> Callable[[compute.ListVpnGatewaysRequest], compute.VpnGatewayList]: + return self._list + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsVpnGatewayRequest], compute.Operation]: + return self._set_labels + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsVpnGatewayRequest], compute.TestPermissionsResponse + ]: + return self._test_iam_permissions + + def close(self): + self._session.close() + __all__ = ("VpnGatewaysRestTransport",) diff --git a/google/cloud/compute_v1/services/vpn_tunnels/client.py b/google/cloud/compute_v1/services/vpn_tunnels/client.py index 1521275fb..7388bc3f4 100644 --- a/google/cloud/compute_v1/services/vpn_tunnels/client.py +++ b/google/cloud/compute_v1/services/vpn_tunnels/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.vpn_tunnels import pagers from google.cloud.compute_v1.types import compute from .transports.base import VpnTunnelsTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,21 +335,22 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def aggregated_list( self, - request: compute.AggregatedListVpnTunnelsRequest = None, + request: Union[compute.AggregatedListVpnTunnelsRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.AggregatedListPager: r"""Retrieves an aggregated list of VPN tunnels. Args: - request (google.cloud.compute_v1.types.AggregatedListVpnTunnelsRequest): + request (Union[google.cloud.compute_v1.types.AggregatedListVpnTunnelsRequest, dict]): The request object. A request message for VpnTunnels.AggregatedList. See the method description for details. @@ -399,19 +411,19 @@ def aggregated_list( def delete( self, - request: compute.DeleteVpnTunnelRequest = None, + request: Union[compute.DeleteVpnTunnelRequest, dict] = None, *, project: str = None, region: str = None, vpn_tunnel: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Deletes the specified VpnTunnel resource. Args: - request (google.cloud.compute_v1.types.DeleteVpnTunnelRequest): + request (Union[google.cloud.compute_v1.types.DeleteVpnTunnelRequest, dict]): The request object. 
A request message for VpnTunnels.Delete. See the method description for details. @@ -494,12 +506,12 @@ def delete( def get( self, - request: compute.GetVpnTunnelRequest = None, + request: Union[compute.GetVpnTunnelRequest, dict] = None, *, project: str = None, region: str = None, vpn_tunnel: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnTunnel: @@ -507,7 +519,7 @@ def get( of available VPN tunnels by making a list() request. Args: - request (google.cloud.compute_v1.types.GetVpnTunnelRequest): + request (Union[google.cloud.compute_v1.types.GetVpnTunnelRequest, dict]): The request object. A request message for VpnTunnels.Get. See the method description for details. project (str): @@ -577,12 +589,12 @@ def get( def insert( self, - request: compute.InsertVpnTunnelRequest = None, + request: Union[compute.InsertVpnTunnelRequest, dict] = None, *, project: str = None, region: str = None, vpn_tunnel_resource: compute.VpnTunnel = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -590,7 +602,7 @@ def insert( and region using the data included in the request. Args: - request (google.cloud.compute_v1.types.InsertVpnTunnelRequest): + request (Union[google.cloud.compute_v1.types.InsertVpnTunnelRequest, dict]): The request object. A request message for VpnTunnels.Insert. See the method description for details. @@ -671,11 +683,11 @@ def insert( def list( self, - request: compute.ListVpnTunnelsRequest = None, + request: Union[compute.ListVpnTunnelsRequest, dict] = None, *, project: str = None, region: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -683,7 +695,7 @@ def list( the specified project and region. Args: - request (google.cloud.compute_v1.types.ListVpnTunnelsRequest): + request (Union[google.cloud.compute_v1.types.ListVpnTunnelsRequest, dict]): The request object. A request message for VpnTunnels.List. See the method description for details. project (str): @@ -750,6 +762,19 @@ def list( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/vpn_tunnels/pagers.py b/google/cloud/compute_v1/services/vpn_tunnels/pagers.py index f6c2be400..7f63c7c7b 100644 --- a/google/cloud/compute_v1/services/vpn_tunnels/pagers.py +++ b/google/cloud/compute_v1/services/vpn_tunnels/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.VpnTunnelAggregatedList]: + def pages(self) -> Iterator[compute.VpnTunnelAggregatedList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[Tuple[str, compute.VpnTunnelsScopedList]]: + def __iter__(self) -> Iterator[Tuple[str, compute.VpnTunnelsScopedList]]: for page in self.pages: yield from page.items.items() @@ -139,14 +139,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.VpnTunnelList]: + def pages(self) -> Iterator[compute.VpnTunnelList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.VpnTunnel]: + def __iter__(self) -> Iterator[compute.VpnTunnel]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/vpn_tunnels/transports/base.py b/google/cloud/compute_v1/services/vpn_tunnels/transports/base.py index 117cf5efe..29590efd1 100644 --- a/google/cloud/compute_v1/services/vpn_tunnels/transports/base.py +++ b/google/cloud/compute_v1/services/vpn_tunnels/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class 
VpnTunnelsTransport(abc.ABC): """Abstract transport class for VpnTunnels.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -175,6 +139,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def aggregated_list( self, diff --git a/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py b/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py index aa4426a59..eac31214d 100644 --- a/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py +++ b/google/cloud/compute_v1/services/vpn_tunnels/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import VpnTunnelsTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import VpnTunnelsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class VpnTunnelsRestTransport(VpnTunnelsTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def aggregated_list( + def _aggregated_list( self, request: compute.AggregatedListVpnTunnelsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnTunnelAggregatedList: r"""Call the aggregated list method over HTTP. @@ -112,6 +136,9 @@ def aggregated_list( VpnTunnels.AggregatedList. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
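The REST transport remains something callers can construct directly and hand to the client; the new `url_scheme` parameter is accepted by the constructor, although the per-method request code still hard-codes `https` (see the TODO-style comments above). A hedged construction sketch, using anonymous credentials only so the example stays self-contained; real code would rely on application default credentials:

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.vpn_tunnels import VpnTunnelsClient
from google.cloud.compute_v1.services.vpn_tunnels.transports.rest import (
    VpnTunnelsRestTransport,
)

# Build the REST transport explicitly and pass it to the client.
transport = VpnTunnelsRestTransport(
    credentials=ga_credentials.AnonymousCredentials(),
)
client = VpnTunnelsClient(transport=transport)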
@@ -120,32 +147,54 @@ def aggregated_list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/aggregated/vpnTunnels".format( - host=self._host, project=request.project, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/vpnTunnels", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.AggregatedListVpnTunnelsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.AggregatedListVpnTunnelsRequest.to_json( + compute.AggregatedListVpnTunnelsRequest( + transcoded_request["query_params"] + ), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.AggregatedListVpnTunnelsRequest.filter in request: - query_params["filter"] = request.filter - if compute.AggregatedListVpnTunnelsRequest.include_all_scopes in request: - query_params["includeAllScopes"] = request.include_all_scopes - if compute.AggregatedListVpnTunnelsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.AggregatedListVpnTunnelsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.AggregatedListVpnTunnelsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.AggregatedListVpnTunnelsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -157,10 +206,12 @@ def aggregated_list( response.content, ignore_unknown_fields=True ) - def delete( + def _delete( self, request: compute.DeleteVpnTunnelRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the delete method over HTTP. @@ -171,6 +222,9 @@ def delete( VpnTunnels.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -194,25 +248,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}".format( - host=self._host, - project=request.project, - region=request.region, - vpn_tunnel=request.vpn_tunnel, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("vpn_tunnel", "vpnTunnel"), + ] + + request_kwargs = compute.DeleteVpnTunnelRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteVpnTunnelRequest.to_json( + compute.DeleteVpnTunnelRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.DeleteVpnTunnelRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -222,10 +305,12 @@ def delete( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def get( + def _get( self, request: compute.GetVpnTunnelRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnTunnel: r"""Call the get method over HTTP. @@ -235,6 +320,9 @@ def get( The request object. A request message for VpnTunnels.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -246,23 +334,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}".format( - host=self._host, - project=request.project, - region=request.region, - vpn_tunnel=request.vpn_tunnel, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ("vpn_tunnel", "vpnTunnel"), + ] + + request_kwargs = compute.GetVpnTunnelRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetVpnTunnelRequest.to_json( + compute.GetVpnTunnelRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -272,10 +391,12 @@ def get( # Return the response return compute.VpnTunnel.from_json(response.content, ignore_unknown_fields=True) - def insert( + def _insert( self, request: compute.InsertVpnTunnelRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the insert method over HTTP. @@ -286,6 +407,9 @@ def insert( VpnTunnels.Insert. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -309,30 +433,60 @@ def insert( """ + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnTunnels", + "body": "vpn_tunnel_resource", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.InsertVpnTunnelRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + # Jsonify the request body body = compute.VpnTunnel.to_json( - request.vpn_tunnel_resource, + compute.VpnTunnel(transcoded_request["body"]), including_default_value_fields=False, use_integers_for_enums=False, ) - - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnTunnels".format( - host=self._host, project=request.project, region=request.region, + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.InsertVpnTunnelRequest.to_json( + compute.InsertVpnTunnelRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.InsertVpnTunnelRequest.request_id in request: - query_params["requestId"] = request.request_id + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post( - url, headers=headers, params=query_params, data=body, + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -343,10 +497,12 @@ def insert( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListVpnTunnelsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.VpnTunnelList: r"""Call the list method over HTTP. @@ -357,6 +513,9 @@ def list( VpnTunnels.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -367,30 +526,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/regions/{region}/vpnTunnels".format( - host=self._host, project=request.project, region=request.region, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/vpnTunnels", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("region", "region"), + ] + + request_kwargs = compute.ListVpnTunnelsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListVpnTunnelsRequest.to_json( + compute.ListVpnTunnelsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListVpnTunnelsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListVpnTunnelsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListVpnTunnelsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListVpnTunnelsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListVpnTunnelsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. 
@@ -402,5 +584,32 @@ def list( response.content, ignore_unknown_fields=True ) + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListVpnTunnelsRequest], compute.VpnTunnelAggregatedList + ]: + return self._aggregated_list + + @property + def delete(self) -> Callable[[compute.DeleteVpnTunnelRequest], compute.Operation]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetVpnTunnelRequest], compute.VpnTunnel]: + return self._get + + @property + def insert(self) -> Callable[[compute.InsertVpnTunnelRequest], compute.Operation]: + return self._insert + + @property + def list(self) -> Callable[[compute.ListVpnTunnelsRequest], compute.VpnTunnelList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("VpnTunnelsRestTransport",) diff --git a/google/cloud/compute_v1/services/zone_operations/client.py b/google/cloud/compute_v1/services/zone_operations/client.py index 96c49a391..90dff5bd6 100644 --- a/google/cloud/compute_v1/services/zone_operations/client.py +++ b/google/cloud/compute_v1/services/zone_operations/client.py @@ -14,22 +14,26 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.zone_operations import pagers from google.cloud.compute_v1.types import compute from .transports.base import ZoneOperationsTransport, DEFAULT_CLIENT_INFO @@ -263,8 +267,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -326,16 +337,17 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def delete( self, - request: compute.DeleteZoneOperationRequest = None, + request: Union[compute.DeleteZoneOperationRequest, dict] = None, *, project: str = None, zone: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteZoneOperationResponse: @@ -343,7 +355,7 @@ def delete( resource. Args: - request (google.cloud.compute_v1.types.DeleteZoneOperationRequest): + request (Union[google.cloud.compute_v1.types.DeleteZoneOperationRequest, dict]): The request object. A request message for ZoneOperations.Delete. See the method description for details. @@ -414,12 +426,12 @@ def delete( def get( self, - request: compute.GetZoneOperationRequest = None, + request: Union[compute.GetZoneOperationRequest, dict] = None, *, project: str = None, zone: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -427,7 +439,7 @@ def get( resource. Args: - request (google.cloud.compute_v1.types.GetZoneOperationRequest): + request (Union[google.cloud.compute_v1.types.GetZoneOperationRequest, dict]): The request object. A request message for ZoneOperations.Get. See the method description for details. @@ -510,11 +522,11 @@ def get( def list( self, - request: compute.ListZoneOperationsRequest = None, + request: Union[compute.ListZoneOperationsRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -522,7 +534,7 @@ def list( within the specified zone. Args: - request (google.cloud.compute_v1.types.ListZoneOperationsRequest): + request (Union[google.cloud.compute_v1.types.ListZoneOperationsRequest, dict]): The request object. A request message for ZoneOperations.List. See the method description for details. @@ -592,12 +604,12 @@ def list( def wait( self, - request: compute.WaitZoneOperationRequest = None, + request: Union[compute.WaitZoneOperationRequest, dict] = None, *, project: str = None, zone: str = None, operation: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: @@ -615,7 +627,7 @@ def wait( the operation is not ``DONE``. Args: - request (google.cloud.compute_v1.types.WaitZoneOperationRequest): + request (Union[google.cloud.compute_v1.types.WaitZoneOperationRequest, dict]): The request object. A request message for ZoneOperations.Wait. See the method description for details. 
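The client constructors no longer go through distutils.util.strtobool: GOOGLE_API_USE_CLIENT_CERTIFICATE is validated explicitly and only the literal strings "true" and "false" are accepted. A small sketch of the resulting behavior in isolation; the helper name is hypothetical and only mirrors the constructor logic above.

import os

def _use_client_cert_from_env() -> bool:
    # Hypothetical helper mirroring the constructor check above: values
    # such as "1", "yes", or "True" (which strtobool used to coerce) now
    # raise instead of silently enabling mTLS.
    value = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
    if value not in ("true", "false"):
        raise ValueError(
            "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be "
            "either `true` or `false`"
        )
    return value == "true"

os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
assert _use_client_cert_from_env() is True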
@@ -696,6 +708,19 @@ def wait( # Done; return the response. return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/zone_operations/pagers.py b/google/cloud/compute_v1/services/zone_operations/pagers.py index e4c89f59a..a27f2f3ba 100644 --- a/google/cloud/compute_v1/services/zone_operations/pagers.py +++ b/google/cloud/compute_v1/services/zone_operations/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.OperationList]: + def pages(self) -> Iterator[compute.OperationList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Operation]: + def __iter__(self) -> Iterator[compute.Operation]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/zone_operations/transports/base.py b/google/cloud/compute_v1/services/zone_operations/transports/base.py index 9122556e4..f5acc99e0 100644 --- a/google/cloud/compute_v1/services/zone_operations/transports/base.py +++ b/google/cloud/compute_v1/services/zone_operations/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ZoneOperationsTransport(abc.ABC): """Abstract transport class for ZoneOperations.""" @@ -99,7 +86,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + 
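With __enter__/__exit__ added, the clients can be used as context managers, and exiting the block closes the underlying transport as warned in the docstring above. A minimal usage sketch, assuming the default (non-shared) transport and a placeholder project/zone:

from google.cloud import compute_v1

# Exiting the with-block calls client.transport.close(); only do this when
# the transport is not shared with other clients.
with compute_v1.ZoneOperationsClient() as client:
    for operation in client.list(project="example-project", zone="us-central1-a"):
        print(operation.name, operation.status)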
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. self._scopes = scopes @@ -121,7 +108,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -132,29 +119,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -172,6 +136,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def delete( self, diff --git a/google/cloud/compute_v1/services/zone_operations/transports/rest.py b/google/cloud/compute_v1/services/zone_operations/transports/rest.py index 6e1970eba..c952f017c 100644 --- a/google/cloud/compute_v1/services/zone_operations/transports/rest.py +++ b/google/cloud/compute_v1/services/zone_operations/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,21 @@ # See the License for the specific language governing permissions and # limitations under the License. 
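The OptionalRetry alias introduced at the top of the transports and clients guards against older google-api-core releases: when gapic_v1.method._MethodDefault is not available, the attribute access raises AttributeError and the alias falls back to a plain object union. It is only used as a type annotation on method signatures; a sketch (not taken verbatim from the diff) of how it is consumed:

from typing import Sequence, Tuple, Union

from google.api_core import gapic_v1
from google.api_core import retry as retries

try:
    # Newer google-api-core exposes the sentinel type behind DEFAULT.
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore


def get(
    request=None,
    *,
    retry: OptionalRetry = gapic_v1.method.DEFAULT,
    timeout: float = None,
    metadata: Sequence[Tuple[str, str]] = (),
):
    """Hypothetical wrapper showing where the annotation is used."""
    ...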
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ( + ZoneOperationsTransport, + DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, +) -from google.cloud.compute_v1.types import compute -from .base import ZoneOperationsTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ZoneOperationsRestTransport(ZoneOperationsTransport): @@ -53,6 +72,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +100,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +123,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def delete( + def _delete( self, request: compute.DeleteZoneOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.DeleteZoneOperationResponse: r"""Call the delete method over HTTP. @@ -112,6 +139,9 @@ def delete( ZoneOperations.Delete. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -123,23 +153,54 @@ def delete( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/operations/{operation}".format( - host=self._host, - project=request.project, - zone=request.zone, - operation=request.operation, + http_options = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.DeleteZoneOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.DeleteZoneOperationRequest.to_json( + compute.DeleteZoneOperationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.delete(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -151,10 +212,12 @@ def delete( response.content, ignore_unknown_fields=True ) - def get( + def _get( self, request: compute.GetZoneOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the get method over HTTP. @@ -165,6 +228,9 @@ def get( ZoneOperations.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -188,23 +254,54 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/operations/{operation}".format( - host=self._host, - project=request.project, - zone=request.zone, - operation=request.operation, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/operations/{operation}", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetZoneOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetZoneOperationRequest.to_json( + compute.GetZoneOperationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -214,10 +311,12 @@ def get( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListZoneOperationsRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.OperationList: r"""Call the list method over HTTP. @@ -228,6 +327,9 @@ def list( ZoneOperations.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -238,30 +340,53 @@ def list( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/operations".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/operations", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.ListZoneOperationsRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListZoneOperationsRequest.to_json( + compute.ListZoneOperationsRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListZoneOperationsRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListZoneOperationsRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListZoneOperationsRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListZoneOperationsRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListZoneOperationsRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -273,10 +398,12 @@ def list( response.content, ignore_unknown_fields=True ) - def wait( + def _wait( self, request: compute.WaitZoneOperationRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Call the wait method over HTTP. @@ -287,6 +414,9 @@ def wait( ZoneOperations.Wait. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -310,23 +440,54 @@ def wait( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}/operations/{operation}/wait".format( - host=self._host, - project=request.project, - zone=request.zone, - operation=request.operation, + http_options = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/operations/{operation}/wait", + }, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("operation", "operation"), + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.WaitZoneOperationRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.WaitZoneOperationRequest.to_json( + compute.WaitZoneOperationRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.post(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -336,5 +497,30 @@ def wait( # Return the response return compute.Operation.from_json(response.content, ignore_unknown_fields=True) + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteZoneOperationRequest], compute.DeleteZoneOperationResponse + ]: + return self._delete + + @property + def get(self) -> Callable[[compute.GetZoneOperationRequest], compute.Operation]: + return self._get + + @property + def list( + self, + ) -> Callable[[compute.ListZoneOperationsRequest], compute.OperationList]: + return self._list + + @property + def wait(self) -> Callable[[compute.WaitZoneOperationRequest], compute.Operation]: + return self._wait + + def close(self): + self._session.close() + __all__ = ("ZoneOperationsRestTransport",) diff --git a/google/cloud/compute_v1/services/zones/client.py b/google/cloud/compute_v1/services/zones/client.py index 1264071d3..2e88c99a7 100644 --- a/google/cloud/compute_v1/services/zones/client.py +++ b/google/cloud/compute_v1/services/zones/client.py @@ -14,22 +14,26 @@ # limitations under the License. 
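In the REST transports the call methods become private (_delete, _get, _list, _wait) and are surfaced through typed read-only properties, so callers keep writing transport.get(request) while the base class can declare the callable signatures. A stripped-down sketch of the pattern with a hypothetical transport class:

from typing import Callable


class ExampleRestTransport:
    """Hypothetical transport illustrating the property-based dispatch."""

    def _get(self, request: dict) -> dict:
        # The real transports transcode the request and issue the HTTP
        # call here; this stub just echoes its input.
        return {"echo": request}

    @property
    def get(self) -> Callable[[dict], dict]:
        # Callers still invoke transport.get(request); the property simply
        # hands back the private implementation.
        return self._get


transport = ExampleRestTransport()
response = transport.get({"project": "example-project"})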
# from collections import OrderedDict -from distutils import util import os import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + from google.cloud.compute_v1.services.zones import pagers from google.cloud.compute_v1.types import compute from .transports.base import ZonesTransport, DEFAULT_CLIENT_INFO @@ -261,8 +265,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None @@ -324,15 +335,16 @@ def __init__( client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, + always_use_jwt_access=True, ) def get( self, - request: compute.GetZoneRequest = None, + request: Union[compute.GetZoneRequest, dict] = None, *, project: str = None, zone: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Zone: @@ -340,7 +352,7 @@ def get( available zones by making a list() request. Args: - request (google.cloud.compute_v1.types.GetZoneRequest): + request (Union[google.cloud.compute_v1.types.GetZoneRequest, dict]): The request object. A request message for Zones.Get. See the method description for details. project (str): @@ -404,10 +416,10 @@ def get( def list( self, - request: compute.ListZonesRequest = None, + request: Union[compute.ListZonesRequest, dict] = None, *, project: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, + retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListPager: @@ -415,7 +427,7 @@ def list( specified project. Args: - request (google.cloud.compute_v1.types.ListZonesRequest): + request (Union[google.cloud.compute_v1.types.ListZonesRequest, dict]): The request object. A request message for Zones.List. See the method description for details. project (str): @@ -474,6 +486,19 @@ def list( # Done; return the response. 
return response + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( diff --git a/google/cloud/compute_v1/services/zones/pagers.py b/google/cloud/compute_v1/services/zones/pagers.py index 46507215f..d02841d7b 100644 --- a/google/cloud/compute_v1/services/zones/pagers.py +++ b/google/cloud/compute_v1/services/zones/pagers.py @@ -15,13 +15,13 @@ # from typing import ( Any, - AsyncIterable, + AsyncIterator, Awaitable, Callable, - Iterable, Sequence, Tuple, Optional, + Iterator, ) from google.cloud.compute_v1.types import compute @@ -74,14 +74,14 @@ def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property - def pages(self) -> Iterable[compute.ZoneList]: + def pages(self) -> Iterator[compute.ZoneList]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response - def __iter__(self) -> Iterable[compute.Zone]: + def __iter__(self) -> Iterator[compute.Zone]: for page in self.pages: yield from page.items diff --git a/google/cloud/compute_v1/services/zones/transports/base.py b/google/cloud/compute_v1/services/zones/transports/base.py index 3beec763a..7cf38af17 100644 --- a/google/cloud/compute_v1/services/zones/transports/base.py +++ b/google/cloud/compute_v1/services/zones/transports/base.py @@ -15,15 +15,13 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version import pkg_resources -from requests import __version__ as requests_version import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -32,21 +30,10 @@ try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-compute",).version, - grpc_version=None, - rest_version=requests_version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - class ZonesTransport(abc.ABC): """Abstract transport class for Zones.""" @@ -100,7 +87,7 @@ def __init__( host += ":443" self._host = host - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} # Save the scopes. 
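The pager annotations switch from Iterable to Iterator, which matches how ListPager behaves at runtime: iterating the pager yields individual items, while the pages property yields whole responses and follows next_page_token. A short usage sketch with a placeholder project:

from google.cloud import compute_v1

client = compute_v1.ZonesClient()

# Item-by-item iteration: __iter__ yields compute.Zone messages.
for zone in client.list(project="example-project"):
    print(zone.name)

# Page-by-page iteration: pages yields compute.ZoneList responses and
# transparently requests the next page via next_page_token.
for page in client.list(project="example-project").pages:
    print(len(page.items))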
self._scopes = scopes @@ -122,7 +109,7 @@ def __init__( **scopes_kwargs, quota_project_id=quota_project_id ) - # If the credentials is service account credentials, then always try to use self signed JWT. + # If the credentials are service account credentials, then always try to use self signed JWT. if ( always_use_jwt_access and isinstance(credentials, service_account.Credentials) @@ -133,29 +120,6 @@ def __init__( # Save the credentials. self._credentials = credentials - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs( - cls, host: str, scopes: Optional[Sequence[str]] - ) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -167,6 +131,15 @@ def _prep_wrapped_messages(self, client_info): ), } + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + @property def get( self, diff --git a/google/cloud/compute_v1/services/zones/transports/rest.py b/google/cloud/compute_v1/services/zones/transports/rest.py index 1bc003f5f..92cf52cdb 100644 --- a/google/cloud/compute_v1/services/zones/transports/rest.py +++ b/google/cloud/compute_v1/services/zones/transports/rest.py @@ -1,3 +1,22 @@ +from google.auth.transport.requests import AuthorizedSession # type: ignore +import json # type: ignore +import grpc # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.api_core import rest_helpers +from google.api_core import path_template +from google.api_core import gapic_v1 +from requests import __version__ as requests_version +from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + # -*- coding: utf-8 -*- # Copyright 2020 Google LLC # @@ -13,21 +32,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple -from google.api_core import gapic_v1 # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import grpc # type: ignore +from google.cloud.compute_v1.types import compute -from google.auth.transport.requests import AuthorizedSession +from .base import ZonesTransport, DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO -from google.cloud.compute_v1.types import compute -from .base import ZonesTransport, DEFAULT_CLIENT_INFO +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=requests_version, +) class ZonesRestTransport(ZonesTransport): @@ -53,6 +69,7 @@ def __init__( quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", ) -> None: """Instantiate the transport. @@ -80,6 +97,11 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. @@ -98,10 +120,12 @@ def __init__( self._session.configure_mtls_channel(client_cert_source_for_mtls) self._prep_wrapped_messages(client_info) - def get( + def _get( self, request: compute.GetZoneRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Zone: r"""Call the get method over HTTP. @@ -111,6 +135,9 @@ def get( The request object. A request message for Zones.Get. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -125,20 +152,50 @@ def get( """ - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones/{zone}".format( - host=self._host, project=request.project, zone=request.zone, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/zones/{zone}",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ("zone", "zone"), + ] + + request_kwargs = compute.GetZoneRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.GetZoneRequest.to_json( + compute.GetZoneRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -148,10 +205,12 @@ def get( # Return the response return compute.Zone.from_json(response.content, ignore_unknown_fields=True) - def list( + def _list( self, request: compute.ListZonesRequest, *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> compute.ZoneList: r"""Call the list method over HTTP. @@ -161,6 +220,9 @@ def list( The request object. A request message for Zones.List. See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -169,30 +231,49 @@ def list( Contains a list of zone resources. 
""" - # TODO(yon-mg): need to handle grpc transcoding and parse url correctly - # current impl assumes basic case of grpc transcoding - url = "https://{host}/compute/v1/projects/{project}/zones".format( - host=self._host, project=request.project, + http_options = [ + {"method": "get", "uri": "/compute/v1/projects/{project}/zones",}, + ] + + required_fields = [ + # (snake_case_name, camel_case_name) + ("project", "project"), + ] + + request_kwargs = compute.ListZonesRequest.to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params = json.loads( + compute.ListZonesRequest.to_json( + compute.ListZonesRequest(transcoded_request["query_params"]), + including_default_value_fields=False, + use_integers_for_enums=False, + ) ) - # TODO(yon-mg): handle nested fields corerctly rather than using only top level fields - # not required for GCE - query_params = {} - if compute.ListZonesRequest.filter in request: - query_params["filter"] = request.filter - if compute.ListZonesRequest.max_results in request: - query_params["maxResults"] = request.max_results - if compute.ListZonesRequest.order_by in request: - query_params["orderBy"] = request.order_by - if compute.ListZonesRequest.page_token in request: - query_params["pageToken"] = request.page_token - if compute.ListZonesRequest.return_partial_success in request: - query_params["returnPartialSuccess"] = request.return_partial_success + # Ensure required fields have values in query_params. + # If a required field has a default value, it can get lost + # by the to_json call above. + orig_query_params = transcoded_request["query_params"] + for snake_case_name, camel_case_name in required_fields: + if snake_case_name in orig_query_params: + if camel_case_name not in query_params: + query_params[camel_case_name] = orig_query_params[snake_case_name] # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" - response = self._session.get(url, headers=headers, params=query_params,) + response = getattr(self._session, method)( + # Replace with proper schema configuration (http/https) logic + "https://{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception # subclass. @@ -202,5 +283,16 @@ def list( # Return the response return compute.ZoneList.from_json(response.content, ignore_unknown_fields=True) + @property + def get(self) -> Callable[[compute.GetZoneRequest], compute.Zone]: + return self._get + + @property + def list(self) -> Callable[[compute.ListZonesRequest], compute.ZoneList]: + return self._list + + def close(self): + self._session.close() + __all__ = ("ZonesRestTransport",) diff --git a/google/cloud/compute_v1/types/compute.py b/google/cloud/compute_v1/types/compute.py index 219e73044..febcbedb7 100644 --- a/google/cloud/compute_v1/types/compute.py +++ b/google/cloud/compute_v1/types/compute.py @@ -1188,6 +1188,8 @@ class AbandonInstancesInstanceGroupManagerRequest(proto.Message): A request message for InstanceGroupManagers.AbandonInstances. See the method description for details. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: instance_group_manager (str): The name of the managed instance group. 
@@ -1211,6 +1213,8 @@ class AbandonInstancesInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. @@ -1257,6 +1261,8 @@ class AbandonInstancesRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -1278,6 +1284,8 @@ class AcceleratorConfig(proto.Message): accelerator_count (int): The number of the guest accelerator cards exposed to this instance. + + This field is a member of `oneof`_ ``_accelerator_count``. accelerator_type (str): Full or partial URL of the accelerator type resource to attach to this instance. For @@ -1286,6 +1294,8 @@ class AcceleratorConfig(proto.Message): you are creating an instance template, specify only the accelerator name. See GPUs on Compute Engine for a full list of accelerator types. + + This field is a member of `oneof`_ ``_accelerator_type``. """ accelerator_count = proto.Field(proto.INT32, number=504879675, optional=True,) @@ -1302,31 +1312,49 @@ class AcceleratorType(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. deprecated (google.cloud.compute_v1.types.DeprecationStatus): [Output Only] The deprecation status associated with this accelerator type. + + This field is a member of `oneof`_ ``_deprecated``. description (str): [Output Only] An optional textual description of the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] The type of the resource. Always compute#acceleratorType for accelerator types. + + This field is a member of `oneof`_ ``_kind``. maximum_cards_per_instance (int): [Output Only] Maximum number of accelerator cards allowed per instance. + + This field is a member of `oneof`_ ``_maximum_cards_per_instance``. name (str): [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. self_link (str): [Output Only] Server-defined, fully qualified URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. zone (str): [Output Only] The name of the zone where the accelerator type resides, such as us-central1-a. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. """ creation_timestamp = proto.Field(proto.STRING, number=30525366, optional=True,) @@ -1346,10 +1374,13 @@ class AcceleratorType(proto.Message): class AcceleratorTypeAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.AcceleratorTypeAggregatedList.ItemsEntry]): A list of AcceleratorTypesScopedList resources. @@ -1357,6 +1388,8 @@ class AcceleratorTypeAggregatedList(proto.Message): [Output Only] Type of resource. 
Always compute#acceleratorTypeAggregatedList for aggregated lists of accelerator types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -1364,12 +1397,18 @@ class AcceleratorTypeAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -1394,15 +1433,20 @@ def raw_page(self): class AcceleratorTypeList(proto.Message): r"""Contains a list of accelerator types. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.AcceleratorType]): A list of AcceleratorType resources. kind (str): [Output Only] Type of resource. Always compute#acceleratorTypeList for lists of accelerator types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -1410,10 +1454,16 @@ class AcceleratorTypeList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -1434,6 +1484,7 @@ def raw_page(self): class AcceleratorTypesScopedList(proto.Message): r""" + Attributes: accelerator_types (Sequence[google.cloud.compute_v1.types.AcceleratorType]): [Output Only] A list of accelerator types contained in this @@ -1441,6 +1492,8 @@ class AcceleratorTypesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] An informational warning that appears when the accelerator types list is empty. + + This field is a member of `oneof`_ ``_warning``. """ accelerator_types = proto.RepeatedField( @@ -1453,13 +1506,18 @@ class AcceleratorTypesScopedList(proto.Message): class Accelerators(proto.Message): r""" + Attributes: guest_accelerator_count (int): Number of accelerator cards exposed to the guest. + + This field is a member of `oneof`_ ``_guest_accelerator_count``. guest_accelerator_type (str): The accelerator type resource name, not a full URL, e.g. 'nvidia-tesla-k80'. + + This field is a member of `oneof`_ ``_guest_accelerator_type``. """ guest_accelerator_count = proto.Field(proto.INT32, number=479079316, optional=True,) @@ -1477,16 +1535,24 @@ class AccessConfig(proto.Message): in externalIpv6PrefixLength in ipv6AccessConfig. The field is output only, an IPv6 address from a subnetwork associated with the instance will be allocated dynamically. + + This field is a member of `oneof`_ ``_external_ipv6``. 
external_ipv6_prefix_length (int): [Output Only] The prefix length of the external IPv6 range. + + This field is a member of `oneof`_ ``_external_ipv6_prefix_length``. kind (str): [Output Only] Type of the resource. Always compute#accessConfig for access configs. + + This field is a member of `oneof`_ ``_kind``. name (str): The name of this access configuration. The default and recommended name is External NAT, but you can use any arbitrary string, such as My external IP or Network Access. + + This field is a member of `oneof`_ ``_name``. nat_i_p (str): An external IP address associated with this instance. Specify an unused static external IP @@ -1495,6 +1561,8 @@ class AccessConfig(proto.Message): ephemeral IP address pool. If you specify a static external IP address, it must live in the same region as the zone of the instance. + + This field is a member of `oneof`_ ``_nat_i_p``. network_tier (google.cloud.compute_v1.types.AccessConfig.NetworkTier): This signifies the networking tier used for configuring this access configuration and can @@ -1506,16 +1574,24 @@ class AccessConfig(proto.Message): external IP address is specified, it must match that of the networkTier associated with the Address resource owning that IP. + + This field is a member of `oneof`_ ``_network_tier``. public_ptr_domain_name (str): The DNS domain name for the public PTR record. You can set this field only if the ``setPublicPtr`` field is enabled. + + This field is a member of `oneof`_ ``_public_ptr_domain_name``. set_public_ptr (bool): Specifies whether a public DNS 'PTR' record should be created to map the external IP address of the instance to a DNS domain name. + + This field is a member of `oneof`_ ``_set_public_ptr``. type_ (google.cloud.compute_v1.types.AccessConfig.Type): The type of configuration. The default and only option is ONE_TO_ONE_NAT. + + This field is a member of `oneof`_ ``_type``. """ class NetworkTier(proto.Enum): @@ -1584,6 +1660,8 @@ class AddAccessConfigInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -1613,6 +1691,8 @@ class AddAssociationFirewallPolicyRequest(proto.Message): This is false by default, in which case an error will be returned if an association already exists. + + This field is a member of `oneof`_ ``_replace_existing_association``. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -1629,6 +1709,8 @@ class AddAssociationFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -1666,6 +1748,8 @@ class AddHealthCheckTargetPoolRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_pool (str): Name of the target pool to add a health check to. @@ -1707,6 +1791,8 @@ class AddInstanceTargetPoolRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_pool (str): Name of the TargetPool resource to add instances to. 
@@ -1751,6 +1837,8 @@ class AddInstancesInstanceGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the instance group is located. @@ -1792,6 +1880,8 @@ class AddNodesNodeGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -1833,6 +1923,8 @@ class AddPeeringNetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network = proto.Field(proto.STRING, number=232872494,) @@ -1870,6 +1962,8 @@ class AddResourcePoliciesDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -1910,6 +2004,8 @@ class AddResourcePoliciesInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -1952,6 +2048,8 @@ class AddResourcePoliciesRegionDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ disk = proto.Field(proto.STRING, number=3083677,) @@ -1990,6 +2088,8 @@ class AddRuleFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -2046,6 +2146,8 @@ class AddSignedUrlKeyBackendBucketRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): The body resource for this request """ @@ -2085,6 +2187,8 @@ class AddSignedUrlKeyBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. signed_url_key_resource (google.cloud.compute_v1.types.SignedUrlKey): The body resource for this request """ @@ -2109,25 +2213,39 @@ class Address(proto.Message): address (str): The static IP address represented by this resource. + + This field is a member of `oneof`_ ``_address``. address_type (google.cloud.compute_v1.types.Address.AddressType): The type of address to reserve, either INTERNAL or EXTERNAL. If unspecified, defaults to EXTERNAL. + + This field is a member of `oneof`_ ``_address_type``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. 
id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. ip_version (google.cloud.compute_v1.types.Address.IpVersion): The IP version that will be used by this address. Valid options are IPV4 or IPV6. This can only be specified for a global address. + + This field is a member of `oneof`_ ``_ip_version``. kind (str): [Output Only] Type of the resource. Always compute#address for addresses. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -2138,10 +2256,14 @@ class Address(proto.Message): the last character) must be a dash, lowercase letter, or digit. The last character must be a lowercase letter or digit. + + This field is a member of `oneof`_ ``_name``. network (str): The URL of the network in which to reserve the address. This field can only be used with INTERNAL type with the VPC_PEERING purpose. + + This field is a member of `oneof`_ ``_network``. network_tier (google.cloud.compute_v1.types.Address.NetworkTier): This signifies the networking tier used for configuring this address and can only take the @@ -2151,9 +2273,13 @@ class Address(proto.Message): regional external IP addresses can be either Standard or Premium Tier. If this field is not specified, it is assumed to be PREMIUM. + + This field is a member of `oneof`_ ``_network_tier``. prefix_length (int): The prefix length if the resource represents an IP range. + + This field is a member of `oneof`_ ``_prefix_length``. purpose (google.cloud.compute_v1.types.Address.Purpose): The purpose of this resource, which can be one of the following values: - GCE_ENDPOINT for addresses that are used @@ -2174,13 +2300,19 @@ class Address(proto.Message): ``PRIVATE_SERVICE_CONNECT`` for a private network address that is used to configure Private Service Connect. Only global internal addresses can use this purpose. + + This field is a member of `oneof`_ ``_purpose``. region (str): [Output Only] The URL of the region where a regional address resides. For regional addresses, you must specify the region as a path parameter in the HTTP request URL. *This field is not applicable to global addresses.* + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. status (google.cloud.compute_v1.types.Address.Status): [Output Only] The status of the address, which can be one of RESERVING, RESERVED, or IN_USE. An address that is RESERVING @@ -2188,11 +2320,15 @@ class Address(proto.Message): address is currently reserved and available to use. An IN_USE address is currently being used by another resource and is not available. + + This field is a member of `oneof`_ ``_status``. subnetwork (str): The URL of the subnetwork in which to reserve the address. If an IP address is specified, it must be within the subnetwork's IP range. This field can only be used with INTERNAL type with a GCE_ENDPOINT or DNS_RESOLVER purpose. + + This field is a member of `oneof`_ ``_subnetwork``. users (Sequence[str]): [Output Only] The URLs of the resources that are using this address. @@ -2296,16 +2432,21 @@ class Status(proto.Enum): class AddressAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. 
items (Sequence[google.cloud.compute_v1.types.AddressAggregatedList.ItemsEntry]): A list of AddressesScopedList resources. kind (str): [Output Only] Type of resource. Always compute#addressAggregatedList for aggregated lists of addresses. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -2313,12 +2454,18 @@ class AddressAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -2340,15 +2487,20 @@ def raw_page(self): class AddressList(proto.Message): r"""Contains a list of addresses. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Address]): A list of Address resources. kind (str): [Output Only] Type of resource. Always compute#addressList for lists of addresses. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -2356,10 +2508,16 @@ class AddressList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -2378,12 +2536,15 @@ def raw_page(self): class AddressesScopedList(proto.Message): r""" + Attributes: addresses (Sequence[google.cloud.compute_v1.types.Address]): [Output Only] A list of addresses contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ addresses = proto.RepeatedField(proto.MESSAGE, number=337673122, message="Address",) @@ -2404,12 +2565,16 @@ class AdvancedMachineFeatures(proto.Message): enable_nested_virtualization (bool): Whether to enable nested virtualization or not (default is false). + + This field is a member of `oneof`_ ``_enable_nested_virtualization``. threads_per_core (int): The number of threads per physical core. To disable simultaneous multithreading (SMT) set this to 1. If unset, the maximum number of threads supported per core by the underlying processor is assumed. + + This field is a member of `oneof`_ ``_threads_per_core``. """ enable_nested_virtualization = proto.Field( @@ -2444,6 +2609,8 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. 
For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2455,6 +2622,8 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -2462,6 +2631,8 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -2473,16 +2644,22 @@ class AggregatedListAcceleratorTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -2520,6 +2697,8 @@ class AggregatedListAddressesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2531,6 +2710,8 @@ class AggregatedListAddressesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -2538,6 +2719,8 @@ class AggregatedListAddressesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -2549,16 +2732,22 @@ class AggregatedListAddressesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. 
Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -2596,6 +2785,8 @@ class AggregatedListAutoscalersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2607,6 +2798,8 @@ class AggregatedListAutoscalersRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -2614,6 +2807,8 @@ class AggregatedListAutoscalersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -2625,16 +2820,22 @@ class AggregatedListAutoscalersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -2672,6 +2873,8 @@ class AggregatedListBackendServicesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2683,6 +2886,8 @@ class AggregatedListBackendServicesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. 
If the number of available results is larger than @@ -2690,6 +2895,8 @@ class AggregatedListBackendServicesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -2701,16 +2908,22 @@ class AggregatedListBackendServicesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -2748,6 +2961,8 @@ class AggregatedListDiskTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2759,6 +2974,8 @@ class AggregatedListDiskTypesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -2766,6 +2983,8 @@ class AggregatedListDiskTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -2777,16 +2996,22 @@ class AggregatedListDiskTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -2824,6 +3049,8 @@ class AggregatedListDisksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. 
For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2835,6 +3062,8 @@ class AggregatedListDisksRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -2842,6 +3071,8 @@ class AggregatedListDisksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -2853,16 +3084,22 @@ class AggregatedListDisksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -2900,6 +3137,8 @@ class AggregatedListForwardingRulesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2911,6 +3150,8 @@ class AggregatedListForwardingRulesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -2918,6 +3159,8 @@ class AggregatedListForwardingRulesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -2929,16 +3172,22 @@ class AggregatedListForwardingRulesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. 
Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -2976,6 +3225,8 @@ class AggregatedListGlobalOperationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -2987,6 +3238,8 @@ class AggregatedListGlobalOperationsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -2994,6 +3247,8 @@ class AggregatedListGlobalOperationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3005,16 +3260,22 @@ class AggregatedListGlobalOperationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3052,6 +3313,8 @@ class AggregatedListHealthChecksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3063,6 +3326,8 @@ class AggregatedListHealthChecksRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. 
If the number of available results is larger than @@ -3070,6 +3335,8 @@ class AggregatedListHealthChecksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3081,16 +3348,22 @@ class AggregatedListHealthChecksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3128,6 +3401,8 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3139,6 +3414,8 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3146,6 +3423,8 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3157,16 +3436,22 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3204,6 +3489,8 @@ class AggregatedListInstanceGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3215,6 +3502,8 @@ class AggregatedListInstanceGroupsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3222,6 +3511,8 @@ class AggregatedListInstanceGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3233,16 +3524,22 @@ class AggregatedListInstanceGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3280,6 +3577,8 @@ class AggregatedListInstancesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3291,6 +3590,8 @@ class AggregatedListInstancesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3298,6 +3599,8 @@ class AggregatedListInstancesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. 
By default, results are returned in alphanumerical order based on the resource @@ -3309,16 +3612,22 @@ class AggregatedListInstancesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3356,6 +3665,8 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3367,6 +3678,8 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3374,6 +3687,8 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3385,16 +3700,22 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3432,6 +3753,8 @@ class AggregatedListMachineTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. 
include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3443,6 +3766,8 @@ class AggregatedListMachineTypesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3450,6 +3775,8 @@ class AggregatedListMachineTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3461,16 +3788,22 @@ class AggregatedListMachineTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3508,6 +3841,8 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3519,6 +3854,8 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3526,6 +3863,8 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3537,16 +3876,22 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. 
+ + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3584,6 +3929,8 @@ class AggregatedListNodeGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3595,6 +3942,8 @@ class AggregatedListNodeGroupsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3602,6 +3951,8 @@ class AggregatedListNodeGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3613,16 +3964,22 @@ class AggregatedListNodeGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3660,6 +4017,8 @@ class AggregatedListNodeTemplatesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3671,6 +4030,8 @@ class AggregatedListNodeTemplatesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3678,6 +4039,8 @@ class AggregatedListNodeTemplatesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3689,16 +4052,22 @@ class AggregatedListNodeTemplatesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3736,6 +4105,8 @@ class AggregatedListNodeTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3747,6 +4118,8 @@ class AggregatedListNodeTypesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3754,6 +4127,8 @@ class AggregatedListNodeTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3765,16 +4140,22 @@ class AggregatedListNodeTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3812,6 +4193,8 @@ class AggregatedListPacketMirroringsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. 
include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3823,6 +4206,8 @@ class AggregatedListPacketMirroringsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3830,6 +4215,8 @@ class AggregatedListPacketMirroringsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3841,16 +4228,22 @@ class AggregatedListPacketMirroringsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3888,6 +4281,8 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3899,6 +4294,8 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3906,6 +4303,8 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3917,16 +4316,22 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. 
+ + This field is a member of `oneof`_ ``_page_token``. project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -3964,6 +4369,8 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -3975,6 +4382,8 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -3982,6 +4391,8 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -3993,16 +4404,22 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4040,6 +4457,8 @@ class AggregatedListReservationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4051,6 +4470,8 @@ class AggregatedListReservationsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4058,6 +4479,8 @@ class AggregatedListReservationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. 
Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4069,16 +4492,22 @@ class AggregatedListReservationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4116,6 +4545,8 @@ class AggregatedListResourcePoliciesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4127,6 +4558,8 @@ class AggregatedListResourcePoliciesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4134,6 +4567,8 @@ class AggregatedListResourcePoliciesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4145,16 +4580,22 @@ class AggregatedListResourcePoliciesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4192,6 +4633,8 @@ class AggregatedListRoutersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. 
include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4203,6 +4646,8 @@ class AggregatedListRoutersRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4210,6 +4655,8 @@ class AggregatedListRoutersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4221,16 +4668,22 @@ class AggregatedListRoutersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4268,6 +4721,8 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4279,6 +4734,8 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4286,6 +4743,8 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4297,16 +4756,22 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. 
project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4344,6 +4809,8 @@ class AggregatedListSslCertificatesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4355,6 +4822,8 @@ class AggregatedListSslCertificatesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4362,6 +4831,8 @@ class AggregatedListSslCertificatesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4373,16 +4844,22 @@ class AggregatedListSslCertificatesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4420,6 +4897,8 @@ class AggregatedListSubnetworksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4431,6 +4910,8 @@ class AggregatedListSubnetworksRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4438,6 +4919,8 @@ class AggregatedListSubnetworksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4449,16 +4932,22 @@ class AggregatedListSubnetworksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4496,6 +4985,8 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4507,6 +4998,8 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4514,6 +5007,8 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4525,16 +5020,22 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4572,6 +5073,8 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. 
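The aggregated-list request messages in this block all share the same filtering and paging surface (``filter``, ``include_all_scopes``, ``max_results``, ``order_by``, ``page_token``, ``return_partial_success``). A hedged sketch of driving one of them through its generated client; the project ID and filter expression are placeholders, and the pager is assumed to yield ``(scope, scoped_list)`` pairs as the compute aggregated-list pagers do:

```python
from google.cloud import compute_v1

client = compute_v1.SubnetworksClient()

request = compute_v1.AggregatedListSubnetworksRequest(
    project="my-project",               # placeholder project ID
    filter='name != "default"',         # placeholder filter expression
    max_results=50,                     # per-page size; the pager fetches further pages
    order_by="creationTimestamp desc",  # newest resources first
    return_partial_success=True,        # tolerate unreachable scopes
)

# Iterating the pager yields (scope, scoped_list) pairs, e.g.
# ("regions/us-central1", SubnetworksScopedList).
for scope, scoped_list in client.aggregated_list(request=request):
    for subnetwork in scoped_list.subnetworks:
        print(scope, subnetwork.name)
```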
include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4583,6 +5086,8 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4590,6 +5095,8 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4601,16 +5108,22 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4648,6 +5161,8 @@ class AggregatedListTargetInstancesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4659,6 +5174,8 @@ class AggregatedListTargetInstancesRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4666,6 +5183,8 @@ class AggregatedListTargetInstancesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4677,16 +5196,22 @@ class AggregatedListTargetInstancesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. 
+ + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4724,6 +5249,8 @@ class AggregatedListTargetPoolsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4735,6 +5262,8 @@ class AggregatedListTargetPoolsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4742,6 +5271,8 @@ class AggregatedListTargetPoolsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4753,16 +5284,22 @@ class AggregatedListTargetPoolsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4800,6 +5337,8 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4811,6 +5350,8 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4818,6 +5359,8 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4829,16 +5372,22 @@ class AggregatedListTargetVpnGatewaysRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4876,6 +5425,8 @@ class AggregatedListUrlMapsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4887,6 +5438,8 @@ class AggregatedListUrlMapsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4894,6 +5447,8 @@ class AggregatedListUrlMapsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4905,16 +5460,22 @@ class AggregatedListUrlMapsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -4952,6 +5513,8 @@ class AggregatedListVpnGatewaysRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. 
include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -4963,6 +5526,8 @@ class AggregatedListVpnGatewaysRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -4970,6 +5535,8 @@ class AggregatedListVpnGatewaysRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -4981,16 +5548,22 @@ class AggregatedListVpnGatewaysRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -5028,6 +5601,8 @@ class AggregatedListVpnTunnelsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. include_all_scopes (bool): Indicates whether every visible scope for each scope type (zone, region, global) should be @@ -5039,6 +5614,8 @@ class AggregatedListVpnTunnelsRequest(proto.Message): this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -5046,6 +5623,8 @@ class AggregatedListVpnTunnelsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -5057,16 +5636,22 @@ class AggregatedListVpnTunnelsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. 
project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -5092,11 +5677,15 @@ class AliasIpRange(proto.Message): IP address (such as 10.2.3.4), a netmask (such as /24) or a CIDR-formatted string (such as 10.1.2.0/24). + + This field is a member of `oneof`_ ``_ip_cidr_range``. subnetwork_range_name (str): The name of a subnetwork secondary IP range from which to allocate an IP alias range. If not specified, the primary range of the subnetwork is used. + + This field is a member of `oneof`_ ``_subnetwork_range_name``. """ ip_cidr_range = proto.Field(proto.STRING, number=98117322, optional=True,) @@ -5107,15 +5696,20 @@ class AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk( proto.Message ): r""" + Attributes: disk_size_gb (int): Specifies the size of the disk in base-2 GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. interface (google.cloud.compute_v1.types.AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk.Interface): Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI. For performance characteristics of SCSI over NVMe, see Local SSD performance. + + This field is a member of `oneof`_ ``_interface``. """ class Interface(proto.Enum): @@ -5136,6 +5730,7 @@ class Interface(proto.Enum): class AllocationSpecificSKUAllocationReservedInstanceProperties(proto.Message): r"""Properties of the SKU instances being reserved. Next ID: 9 + Attributes: guest_accelerators (Sequence[google.cloud.compute_v1.types.AcceleratorConfig]): Specifies accelerator type and count. @@ -5147,13 +5742,19 @@ class AllocationSpecificSKUAllocationReservedInstanceProperties(proto.Message): allocation close to other resources. This field is for use by internal tools that use the public API. + + This field is a member of `oneof`_ ``_location_hint``. machine_type (str): Specifies type of machine (name only) which has fixed number of vCPUs and fixed amount of memory. This also includes specifying custom machine type following custom-NUMBER_OF_CPUS-AMOUNT_OF_MEMORY pattern. + + This field is a member of `oneof`_ ``_machine_type``. min_cpu_platform (str): Minimum cpu platform the reservation. + + This field is a member of `oneof`_ ``_min_cpu_platform``. """ guest_accelerators = proto.RepeatedField( @@ -5177,10 +5778,16 @@ class AllocationSpecificSKUReservation(proto.Message): count (int): Specifies the number of resources that are allocated. + + This field is a member of `oneof`_ ``_count``. in_use_count (int): [Output Only] Indicates how many instances are in use. + + This field is a member of `oneof`_ ``_in_use_count``. instance_properties (google.cloud.compute_v1.types.AllocationSpecificSKUAllocationReservedInstanceProperties): The instance properties for the reservation. + + This field is a member of `oneof`_ ``_instance_properties``. """ count = proto.Field(proto.INT64, number=94851343, optional=True,) @@ -5195,6 +5802,7 @@ class AllocationSpecificSKUReservation(proto.Message): class Allowed(proto.Message): r""" + Attributes: I_p_protocol (str): The IP protocol to which this rule applies. 
@@ -5203,6 +5811,8 @@ class Allowed(proto.Message): the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number. + + This field is a member of `oneof`_ ``_I_p_protocol``. ports (Sequence[str]): An optional list of ports to which this rule applies. This field is only applicable for the UDP or TCP protocol. Each @@ -5285,6 +5895,8 @@ class AttachDiskInstanceRequest(proto.Message): even if it's currently attached to another instance. If you try to force attach a zonal disk to an instance, you will receive an error. + + This field is a member of `oneof`_ ``_force_attach``. instance (str): The instance name for this request. project (str): @@ -5305,6 +5917,8 @@ class AttachDiskInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -5349,6 +5963,8 @@ class AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ global_network_endpoint_groups_attach_endpoints_request_resource = proto.Field( @@ -5391,6 +6007,8 @@ class AttachNetworkEndpointsNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the network endpoint group is located. It should comply with @@ -5410,15 +6028,20 @@ class AttachedDisk(proto.Message): r"""An instance-attached disk resource. + Attributes: auto_delete (bool): Specifies whether the disk will be auto- deleted when the instance is deleted (but not when the disk is detached from the instance). + + This field is a member of `oneof`_ ``_auto_delete``. boot (bool): Indicates that this is a boot disk. The virtual machine will use the first partition of the disk for its root filesystem. + + This field is a member of `oneof`_ ``_boot``. device_name (str): Specifies a unique device name of your choice that is reflected into the /dev/disk/by-id/google-\* tree of a Linux @@ -5429,6 +6052,8 @@ class AttachedDisk(proto.Message): in the form persistent-disk-x, where x is a number assigned by Google Compute Engine. This field is only applicable for persistent disks. + + This field is a member of `oneof`_ ``_device_name``. disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): Encrypts or decrypts a disk using a customer- supplied encryption key. If you are creating a @@ -5450,8 +6075,12 @@ class AttachedDisk(proto.Message): templates do not store customer-supplied encryption keys, so you cannot use your own keys to encrypt disks in a managed instance group. + + This field is a member of `oneof`_ ``_disk_encryption_key``. disk_size_gb (int): The size of the disk in GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. guest_os_features (Sequence[google.cloud.compute_v1.types.GuestOsFeature]): A list of features to enable on the guest operating system. Applicable only for bootable @@ -5461,6 +6090,8 @@ class AttachedDisk(proto.Message): [Output Only] A zero-based index to this disk, where 0 is reserved for the boot disk.
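The ``request_id`` fields documented above give these mutating requests at-most-once semantics: retrying with the same UUID does not attach the disk twice. A hedged sketch for ``AttachDiskInstanceRequest``; the project, zone, instance, and disk names are placeholders, and the request body field is assumed to be ``attached_disk_resource`` (it sits outside the hunks shown here):

```python
import uuid

from google.cloud import compute_v1

client = compute_v1.InstancesClient()

request = compute_v1.AttachDiskInstanceRequest(
    project="my-project",          # placeholder
    zone="us-central1-a",          # placeholder
    instance="my-instance",        # placeholder
    request_id=str(uuid.uuid4()),  # idempotency token; reuse it on retries
    attached_disk_resource=compute_v1.AttachedDisk(
        # Existing zonal disk, referenced by partial URL (placeholder).
        source="zones/us-central1-a/disks/my-data-disk",
        auto_delete=False,  # keep the disk when the instance is deleted
    ),
)

operation = client.attach_disk(request=request)  # returns the Operation resource
```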
If you have many disks attached to an instance, each disk would have a unique index number. + + This field is a member of `oneof`_ ``_index``. initialize_params (google.cloud.compute_v1.types.AttachedDiskInitializeParams): [Input Only] Specifies the parameters for a new disk that will be created alongside the new instance. Use @@ -5468,6 +6099,8 @@ class AttachedDisk(proto.Message): attached to the new instance. This property is mutually exclusive with the source property; you can only define one or the other, but not both. + + This field is a member of `oneof`_ ``_initialize_params``. interface (google.cloud.compute_v1.types.AttachedDisk.Interface): Specifies the disk interface to use for attaching this disk, which is either SCSI or @@ -5477,17 +6110,25 @@ class AttachedDisk(proto.Message): format than SCSI. Local SSDs can use either NVME or SCSI. For performance characteristics of SCSI over NVMe, see Local SSD performance. + + This field is a member of `oneof`_ ``_interface``. kind (str): [Output Only] Type of the resource. Always compute#attachedDisk for attached disks. + + This field is a member of `oneof`_ ``_kind``. licenses (Sequence[str]): [Output Only] Any valid publicly visible licenses. mode (google.cloud.compute_v1.types.AttachedDisk.Mode): The mode in which to attach this disk, either READ_WRITE or READ_ONLY. If not specified, the default is to attach the disk in READ_WRITE mode. + + This field is a member of `oneof`_ ``_mode``. shielded_instance_initial_state (google.cloud.compute_v1.types.InitialStateConfig): [Output Only] shielded vm initial state stored on disk + + This field is a member of `oneof`_ ``_shielded_instance_initial_state``. source (str): Specifies a valid partial or full URL to an existing Persistent Disk resource. When creating @@ -5500,10 +6141,14 @@ class AttachedDisk(proto.Message): applicable for persistent disks. Note that for InstanceTemplate, specify the disk name, not the URL for the disk. + + This field is a member of `oneof`_ ``_source``. type_ (google.cloud.compute_v1.types.AttachedDisk.Type): Specifies the type of the disk, either SCRATCH or PERSISTENT. If not specified, the default is PERSISTENT. + + This field is a member of `oneof`_ ``_type``. """ class Interface(proto.Enum): @@ -5576,6 +6221,8 @@ class AttachedDiskInitializeParams(proto.Message): description (str): An optional description. Provide this property when creating the disk. + + This field is a member of `oneof`_ ``_description``. disk_name (str): Specifies the disk name. If not specified, the default is to use the name of the instance. @@ -5583,6 +6230,8 @@ class AttachedDiskInitializeParams(proto.Message): the given region, the existing disk is attached to the new instance and the new disk is not created. + + This field is a member of `oneof`_ ``_disk_name``. disk_size_gb (int): Specifies the size of the disk in base-2 GB. The size must be at least 10 GB. If you specify @@ -5590,6 +6239,8 @@ class AttachedDiskInitializeParams(proto.Message): the default size is the size of the sourceImage. If you do not specify a sourceImage, the default disk size is 500 GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. disk_type (str): Specifies the disk type to use to create the instance. If not specified, the default is pd- @@ -5607,6 +6258,8 @@ class AttachedDiskInitializeParams(proto.Message): zones/zone/diskTypes/diskType Note that for InstanceTemplate, this is the name of the disk type, not URL. + + This field is a member of `oneof`_ ``_disk_type``. 
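For brand-new disks, ``initialize_params`` takes the place of a pre-existing ``source`` disk (the two are mutually exclusive per the docstring). A minimal, hedged sketch of a boot-disk definition using only the fields documented in this hunk; the image family, size, and disk type are placeholders:

```python
from google.cloud import compute_v1

boot_disk = compute_v1.AttachedDisk(
    boot=True,         # the instance roots on the first partition of this disk
    auto_delete=True,  # remove the disk together with the instance
    initialize_params=compute_v1.AttachedDiskInitializeParams(
        source_image="projects/debian-cloud/global/images/family/debian-11",  # placeholder
        disk_size_gb=50,
        disk_type="zones/us-central1-a/diskTypes/pd-ssd",  # partial URL form (placeholder)
    ),
)
```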
labels (Sequence[google.cloud.compute_v1.types.AttachedDiskInitializeParams.LabelsEntry]): Labels to apply to this disk. These can be later modified by the disks.setLabels method. @@ -5616,12 +6269,16 @@ class AttachedDiskInitializeParams(proto.Message): Specifies which action to take on instance update with this disk. Default is to use the existing disk. + + This field is a member of `oneof`_ ``_on_update_action``. provisioned_iops (int): Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. Values must be between 10,000 and 120,000. For more details, see the Extreme persistent disk documentation. + + This field is a member of `oneof`_ ``_provisioned_iops``. resource_policies (Sequence[str]): Resource policies applied to this disk for automatic snapshot creations. Specified using @@ -5650,6 +6307,8 @@ class AttachedDiskInitializeParams(proto.Message): family/family-name: global/images/family/my- image-family If the source image is deleted later, this field will not be set. + + This field is a member of `oneof`_ ``_source_image``. source_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source image. Required if the source image is @@ -5659,6 +6318,8 @@ class AttachedDiskInitializeParams(proto.Message): disks for instances in a managed instance group if the source images are encrypted with your own keys. + + This field is a member of `oneof`_ ``_source_image_encryption_key``. source_snapshot (str): The source snapshot to create this disk. When creating a new instance, one of @@ -5670,9 +6331,13 @@ class AttachedDiskInitializeParams(proto.Message): global/snapshots/my-backup If the source snapshot is deleted later, this field will not be set. + + This field is a member of `oneof`_ ``_source_snapshot``. source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source snapshot. + + This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. """ class OnUpdateAction(proto.Enum): @@ -5736,6 +6401,8 @@ class AuditConfig(proto.Message): For example, ``storage.googleapis.com``, ``cloudsql.googleapis.com``. ``allServices`` is a special value that covers all services. + + This field is a member of `oneof`_ ``_service``. """ audit_log_configs = proto.RepeatedField( @@ -5760,8 +6427,12 @@ class AuditLogConfig(proto.Message): ignore_child_exemptions (bool): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_ignore_child_exemptions``. log_type (google.cloud.compute_v1.types.AuditLogConfig.LogType): The log type that this config enables. + + This field is a member of `oneof`_ ``_log_type``. """ class LogType(proto.Enum): @@ -5779,10 +6450,13 @@ class LogType(proto.Enum): class AuthorizationLoggingOptions(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: permission_type (google.cloud.compute_v1.types.AuthorizationLoggingOptions.PermissionType): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_permission_type``. """ class PermissionType(proto.Enum): @@ -5820,18 +6494,28 @@ class Autoscaler(proto.Message): loadBalancingUtilization. If none of these are specified, the default will be to autoscale based on cpuUtilization to 0.6 or 60%. + + This field is a member of `oneof`_ ``_autoscaling_policy``. 
creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#autoscaler for autoscalers. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -5841,6 +6525,8 @@ class Autoscaler(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. recommended_size (int): [Output Only] Target recommended MIG size (number of instances) computed by autoscaler. Autoscaler calculates the @@ -5848,14 +6534,20 @@ class Autoscaler(proto.Message): is different from ON. This field is empty when autoscaler is not connected to an existing managed instance group or autoscaler did not generate its prediction. + + This field is a member of `oneof`_ ``_recommended_size``. region (str): [Output Only] URL of the region where the instance group resides (for autoscalers living in regional scope). + + This field is a member of `oneof`_ ``_region``. scaling_schedule_status (Sequence[google.cloud.compute_v1.types.Autoscaler.ScalingScheduleStatusEntry]): [Output Only] Status information of existing scaling schedules. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. status (google.cloud.compute_v1.types.Autoscaler.Status): [Output Only] The status of the autoscaler configuration. Current set of possible values: - PENDING: Autoscaler @@ -5866,6 +6558,8 @@ class Autoscaler(proto.Message): errors. Actionable for users. Details are present in the statusDetails field. New values might be added in the future. + + This field is a member of `oneof`_ ``_status``. status_details (Sequence[google.cloud.compute_v1.types.AutoscalerStatusDetails]): [Output Only] Human-readable details about the current state of the autoscaler. Read the documentation for Commonly @@ -5875,9 +6569,13 @@ class Autoscaler(proto.Message): URL of the managed instance group that this autoscaler will scale. This field is required when creating an autoscaler. + + This field is a member of `oneof`_ ``_target``. zone (str): [Output Only] URL of the zone where the instance group resides (for autoscalers living in zonal scope). + + This field is a member of `oneof`_ ``_zone``. """ class Status(proto.Enum): @@ -5919,16 +6617,21 @@ class Status(proto.Enum): class AutoscalerAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.AutoscalerAggregatedList.ItemsEntry]): A list of AutoscalersScopedList resources. kind (str): [Output Only] Type of resource. Always compute#autoscalerAggregatedList for aggregated lists of autoscalers. + + This field is a member of `oneof`_ ``_kind``. 
next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -5936,13 +6639,19 @@ class AutoscalerAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. end_interface: MixerListResponseWithEtagBuilder warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -5964,15 +6673,20 @@ def raw_page(self): class AutoscalerList(proto.Message): r"""Contains a list of Autoscaler resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Autoscaler]): A list of Autoscaler resources. kind (str): [Output Only] Type of resource. Always compute#autoscalerList for lists of autoscalers. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -5980,10 +6694,16 @@ class AutoscalerList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -6002,9 +6722,12 @@ def raw_page(self): class AutoscalerStatusDetails(proto.Message): r""" + Attributes: message (str): The status message. + + This field is a member of `oneof`_ ``_message``. type_ (google.cloud.compute_v1.types.AutoscalerStatusDetails.Type): The type of error, warning, or notice returned. Current set of possible values: - ALL_INSTANCES_UNHEALTHY (WARNING): All @@ -6048,6 +6771,8 @@ class AutoscalerStatusDetails(proto.Message): one of the zones you're using there is a resource stockout. New values might be added in the future. Some of the values might not be available in all API versions. + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -6118,12 +6843,15 @@ class Type(proto.Enum): class AutoscalersScopedList(proto.Message): r""" + Attributes: autoscalers (Sequence[google.cloud.compute_v1.types.Autoscaler]): [Output Only] A list of autoscalers contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of autoscalers when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ autoscalers = proto.RepeatedField( @@ -6136,6 +6864,7 @@ class AutoscalersScopedList(proto.Message): class AutoscalingPolicy(proto.Message): r"""Cloud Autoscaler policy. + Attributes: cool_down_period_sec (int): The number of seconds that the autoscaler @@ -6150,33 +6879,46 @@ class AutoscalingPolicy(proto.Message): you test how long an instance may take to initialize. 
To do this, create an instance and time the startup process. + + This field is a member of `oneof`_ ``_cool_down_period_sec``. cpu_utilization (google.cloud.compute_v1.types.AutoscalingPolicyCpuUtilization): Defines the CPU utilization policy that allows the autoscaler to scale based on the average CPU utilization of a managed instance group. + + This field is a member of `oneof`_ ``_cpu_utilization``. custom_metric_utilizations (Sequence[google.cloud.compute_v1.types.AutoscalingPolicyCustomMetricUtilization]): Configuration parameters of autoscaling based on a custom metric. load_balancing_utilization (google.cloud.compute_v1.types.AutoscalingPolicyLoadBalancingUtilization): Configuration parameters of autoscaling based on load balancer. + + This field is a member of `oneof`_ ``_load_balancing_utilization``. max_num_replicas (int): The maximum number of instances that the autoscaler can scale out to. This is required when creating or updating an autoscaler. The maximum number of replicas must not be lower than minimal number of replicas. + + This field is a member of `oneof`_ ``_max_num_replicas``. min_num_replicas (int): The minimum number of replicas that the autoscaler can scale in to. This cannot be less than 0. If not provided, autoscaler chooses a default value depending on maximum number of instances allowed. + + This field is a member of `oneof`_ ``_min_num_replicas``. mode (google.cloud.compute_v1.types.AutoscalingPolicy.Mode): Defines operating mode for this policy. + + This field is a member of `oneof`_ ``_mode``. scale_in_control (google.cloud.compute_v1.types.AutoscalingPolicyScaleInControl): + This field is a member of `oneof`_ ``_scale_in_control``. scaling_schedules (Sequence[google.cloud.compute_v1.types.AutoscalingPolicy.ScalingSchedulesEntry]): Scaling schedules defined for an autoscaler. Multiple schedules can be set on an autoscaler, and they can overlap. @@ -6230,6 +6972,7 @@ class Mode(proto.Enum): class AutoscalingPolicyCpuUtilization(proto.Message): r"""CPU utilization policy. + Attributes: predictive_method (google.cloud.compute_v1.types.AutoscalingPolicyCpuUtilization.PredictiveMethod): Indicates whether predictive autoscaling based on CPU metric @@ -6239,6 +6982,8 @@ class AutoscalingPolicyCpuUtilization(proto.Message): OPTIMIZE_AVAILABILITY. Predictive autoscaling improves availability by monitoring daily and weekly load patterns and scaling out ahead of anticipated demand. + + This field is a member of `oneof`_ ``_predictive_method``. utilization_target (float): The target CPU utilization that the autoscaler maintains. Must be a float value in the range (0, 1]. If not specified, @@ -6251,6 +6996,8 @@ class AutoscalingPolicyCpuUtilization(proto.Message): reaches the maximum number of instances you specified or until the average utilization reaches the target utilization. + + This field is a member of `oneof`_ ``_utilization_target``. """ class PredictiveMethod(proto.Enum): @@ -6273,6 +7020,7 @@ class PredictiveMethod(proto.Enum): class AutoscalingPolicyCustomMetricUtilization(proto.Message): r"""Custom utilization metric policy. + Attributes: filter (str): A filter string, compatible with a Stackdriver Monitoring @@ -6303,11 +7051,15 @@ class AutoscalingPolicyCustomMetricUtilization(proto.Message): resource type). If multiple TimeSeries are returned upon the query execution, the autoscaler will sum their respective values to obtain its scaling value. + + This field is a member of `oneof`_ ``_filter``. 
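Taken together, the autoscaler messages above compose as in this hedged sketch of a zonal autoscaler that scales a managed instance group on average CPU. Only fields documented in this section are used; the names, the target MIG URL, and the flattened ``autoscaler_resource`` insert parameter are assumptions/placeholders:

```python
from google.cloud import compute_v1

autoscaler = compute_v1.Autoscaler(
    name="web-autoscaler",  # placeholder name
    # URL of the managed instance group to scale (placeholder).
    target="zones/us-central1-a/instanceGroupManagers/web-mig",
    autoscaling_policy=compute_v1.AutoscalingPolicy(
        min_num_replicas=2,
        max_num_replicas=10,
        cool_down_period_sec=120,  # roughly how long one instance takes to initialize
        cpu_utilization=compute_v1.AutoscalingPolicyCpuUtilization(
            utilization_target=0.6,  # keep average CPU around 60%
        ),
    ),
)

client = compute_v1.AutoscalersClient()
operation = client.insert(
    project="my-project", zone="us-central1-a", autoscaler_resource=autoscaler
)
```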
metric (str): The identifier (type) of the Stackdriver Monitoring metric. The metric cannot have negative values. The metric must have a value type of INT64 or DOUBLE. + + This field is a member of `oneof`_ ``_metric``. single_instance_assignment (float): If scaling is based on a per-group metric value that represents the total amount of work to be done or resource @@ -6322,6 +7074,8 @@ class AutoscalingPolicyCustomMetricUtilization(proto.Message): exporting an average or median latency, since this value can't include a chunk assignable to a single instance, it could be better used with utilization_target instead. + + This field is a member of `oneof`_ ``_single_instance_assignment``. utilization_target (float): The target value of the metric that autoscaler maintains. This must be a positive value. A utilization metric scales @@ -6331,10 +7085,14 @@ class AutoscalingPolicyCustomMetricUtilization(proto.Message): https://www.googleapis.com/compute/v1/instance/network/received_bytes_count. The autoscaler works to keep this value constant for each of the instances. + + This field is a member of `oneof`_ ``_utilization_target``. utilization_target_type (google.cloud.compute_v1.types.AutoscalingPolicyCustomMetricUtilization.UtilizationTargetType): Defines how target utilization value is expressed for a Stackdriver Monitoring metric. Either GAUGE, DELTA_PER_SECOND, or DELTA_PER_MINUTE. + + This field is a member of `oneof`_ ``_utilization_target_type``. """ class UtilizationTargetType(proto.Enum): @@ -6368,6 +7126,8 @@ class AutoscalingPolicyLoadBalancingUtilization(proto.Message): in HTTP(S) load balancing configuration) that the autoscaler maintains. Must be a positive float value. If not defined, the default is 0.8. + + This field is a member of `oneof`_ ``_utilization_target``. """ utilization_target = proto.Field(proto.DOUBLE, number=215905870, optional=True,) @@ -6386,10 +7146,14 @@ class AutoscalingPolicyScaleInControl(proto.Message): recommendations. Possibly all these VMs can be deleted at once so user service needs to be prepared to lose that many VMs in one step. + + This field is a member of `oneof`_ ``_max_scaled_in_replicas``. time_window_sec (int): How far back autoscaling looks when computing recommendations to include directives regarding slower scale in, as described above. + + This field is a member of `oneof`_ ``_time_window_sec``. """ max_scaled_in_replicas = proto.Field( @@ -6407,22 +7171,30 @@ class AutoscalingPolicyScalingSchedule(proto.Message): Attributes: description (str): A description of a scaling schedule. + + This field is a member of `oneof`_ ``_description``. disabled (bool): A boolean value that specifies whether a scaling schedule can influence autoscaler recommendations. If set to true, then a scaling schedule has no effect. This field is optional, and its value is false by default. + + This field is a member of `oneof`_ ``_disabled``. duration_sec (int): The duration of time intervals, in seconds, for which this scaling schedule is to run. The minimum allowed value is 300. This field is required. + + This field is a member of `oneof`_ ``_duration_sec``. min_required_replicas (int): The minimum number of VM instances that the autoscaler will recommend in time intervals starting according to schedule. This field is required. + + This field is a member of `oneof`_ ``_min_required_replicas``. schedule (str): The start timestamps of time intervals when this scaling schedule is to provide a scaling signal. 
This field uses the @@ -6433,12 +7205,16 @@ class AutoscalingPolicyScalingSchedule(proto.Message): field is required. Note: These timestamps only describe when autoscaler starts providing the scaling signal. The VMs need additional time to become serving. + + This field is a member of `oneof`_ ``_schedule``. time_zone (str): The time zone to use when interpreting the schedule. The value of this field must be a time zone name from the tz database: http://en.wikipedia.org/wiki/Tz_database. This field is assigned a default value of “UTC” if left empty. + + This field is a member of `oneof`_ ``_time_zone``. """ description = proto.Field(proto.STRING, number=422937596, optional=True,) @@ -6451,6 +7227,7 @@ class AutoscalingPolicyScalingSchedule(proto.Message): class Backend(proto.Message): r"""Message containing information of one individual backend. + Attributes: balancing_mode (google.cloud.compute_v1.types.Backend.BalancingMode): Specifies how to determine whether the @@ -6468,6 +7245,8 @@ class Backend(proto.Message): Backend.maxUtilization is ignored when Backend.balancingMode is RATE. In the future, this incompatible combination will be rejected. + + This field is a member of `oneof`_ ``_balancing_mode``. capacity_scaler (float): A multiplier applied to the backend's target capacity of its balancing mode. The default value is 1, which means the @@ -6478,14 +7257,20 @@ class Backend(proto.Message): configure a setting larger than 0 and smaller than 0.1. You cannot configure a setting of 0 when there is only one backend attached to the backend service. + + This field is a member of `oneof`_ ``_capacity_scaler``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. failover (bool): This field designates whether this is a failover backend. More than one failover backend can be configured for a given BackendService. + + This field is a member of `oneof`_ ``_failover``. group (str): The fully-qualified URL of an instance group or network endpoint group (NEG) resource. To determine what types of @@ -6494,46 +7279,62 @@ class Backend(proto.Message): You must use the *fully-qualified* URL (starting with https://www.googleapis.com/) to specify the instance group or NEG. Partial URLs are not supported. + + This field is a member of `oneof`_ ``_group``. max_connections (int): Defines a target maximum number of simultaneous connections. For usage guidelines, see Connection balancing mode and Utilization balancing mode. Not available if the backend's balancingMode is RATE. + + This field is a member of `oneof`_ ``_max_connections``. max_connections_per_endpoint (int): Defines a target maximum number of simultaneous connections. For usage guidelines, see Connection balancing mode and Utilization balancing mode. Not available if the backend's balancingMode is RATE. + + This field is a member of `oneof`_ ``_max_connections_per_endpoint``. max_connections_per_instance (int): Defines a target maximum number of simultaneous connections. For usage guidelines, see Connection balancing mode and Utilization balancing mode. Not available if the backend's balancingMode is RATE. + + This field is a member of `oneof`_ ``_max_connections_per_instance``. max_rate (int): Defines a maximum number of HTTP requests per second (RPS). For usage guidelines, see Rate balancing mode and Utilization balancing mode. Not available if the backend's balancingMode is CONNECTION.
+ + This field is a member of `oneof`_ ``_max_rate``. max_rate_per_endpoint (float): Defines a maximum target for requests per second (RPS). For usage guidelines, see Rate balancing mode and Utilization balancing mode. Not available if the backend's balancingMode is CONNECTION. + + This field is a member of `oneof`_ ``_max_rate_per_endpoint``. max_rate_per_instance (float): Defines a maximum target for requests per second (RPS). For usage guidelines, see Rate balancing mode and Utilization balancing mode. Not available if the backend's balancingMode is CONNECTION. + + This field is a member of `oneof`_ ``_max_rate_per_instance``. max_utilization (float): Optional parameter to define a target capacity for the UTILIZATIONbalancing mode. The valid range is [0.0, 1.0]. For usage guidelines, see Utilization balancing mode. + + This field is a member of `oneof`_ ``_max_utilization``. """ class BalancingMode(proto.Enum): @@ -6582,11 +7383,17 @@ class BackendBucket(proto.Message): Attributes: bucket_name (str): Cloud Storage bucket name. + + This field is a member of `oneof`_ ``_bucket_name``. cdn_policy (google.cloud.compute_v1.types.BackendBucketCdnPolicy): Cloud CDN configuration for this BackendBucket. + + This field is a member of `oneof`_ ``_cdn_policy``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. custom_response_headers (Sequence[str]): Headers that the HTTP/S load balancer should add to proxied responses. @@ -6594,14 +7401,22 @@ class BackendBucket(proto.Message): An optional textual description of the resource; provided by the client when the resource is created. + + This field is a member of `oneof`_ ``_description``. enable_cdn (bool): If true, enable Cloud CDN for this BackendBucket. + + This field is a member of `oneof`_ ``_enable_cdn``. id (int): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): Type of the resource. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -6611,8 +7426,12 @@ class BackendBucket(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. """ bucket_name = proto.Field(proto.STRING, number=283610048, optional=True,) @@ -6660,6 +7479,8 @@ class BackendBucketCdnPolicy(proto.Message): and CSS). Requests and responses that are marked as uncacheable, as well as dynamic content (including HTML), will not be cached. + + This field is a member of `oneof`_ ``_cache_mode``. client_ttl (int): Specifies a separate client (e.g. browser client) maximum TTL. This is used to clamp the max-age (or Expires) value @@ -6673,6 +7494,8 @@ class BackendBucketCdnPolicy(proto.Message): directive is present. If a client TTL is not specified, a default value (1 hour) will be used. The maximum allowed value is 86400s (1 day). + + This field is a member of `oneof`_ ``_client_ttl``. 
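The BackendBucket and BackendBucketCdnPolicy fields above map one-to-one onto the message constructors. A hedged sketch limited to the TTL and caching knobs documented in this section; the bucket and resource names are placeholders, and the flattened ``backend_bucket_resource`` insert parameter is assumed:

```python
from google.cloud import compute_v1

backend_bucket = compute_v1.BackendBucket(
    name="static-assets",            # placeholder resource name
    bucket_name="my-static-bucket",  # placeholder Cloud Storage bucket
    enable_cdn=True,
    cdn_policy=compute_v1.BackendBucketCdnPolicy(
        client_ttl=3600,        # clamp browser caching to 1 hour
        default_ttl=3600,       # TTL when the origin sets none
        max_ttl=86400,          # cap origin-provided TTLs at 1 day
        negative_caching=True,  # cache common error responses with the default TTLs
        serve_while_stale=86400,
    ),
)

client = compute_v1.BackendBucketsClient()
operation = client.insert(
    project="my-project", backend_bucket_resource=backend_bucket
)
```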
default_ttl (int): Specifies the default TTL for cached content served by this origin for responses that do not have an existing valid TTL @@ -6684,6 +7507,8 @@ class BackendBucketCdnPolicy(proto.Message): value is 31,622,400s (1 year), noting that infrequently accessed objects may be evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_default_ttl``. max_ttl (int): Specifies the maximum allowed TTL for cached content served by this origin. Cache directives @@ -6697,6 +7522,8 @@ class BackendBucketCdnPolicy(proto.Message): maximum allowed value is 31,622,400s (1 year), noting that infrequently accessed objects may be evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_max_ttl``. negative_caching (bool): Negative caching allows per-status code TTLs to be set, in order to apply fine-grained caching for common errors or @@ -6715,6 +7542,8 @@ class BackendBucketCdnPolicy(proto.Message): Reasons): 120s HTTP 405 (Method Not Found), 421 (Misdirected Request), 501 (Not Implemented): 60s. These defaults can be overridden in negative_caching_policy. + + This field is a member of `oneof`_ ``_negative_caching``. negative_caching_policy (Sequence[google.cloud.compute_v1.types.BackendBucketCdnPolicyNegativeCachingPolicy]): Sets a cache TTL for the specified HTTP status code. negative_caching must be enabled to configure @@ -6729,6 +7558,8 @@ class BackendBucketCdnPolicy(proto.Message): If true then Cloud CDN will combine multiple concurrent cache fill requests into a small number of requests to the origin. + + This field is a member of `oneof`_ ``_request_coalescing``. serve_while_stale (int): Serve existing content from the cache (if available) when revalidating content with the @@ -6744,6 +7575,8 @@ class BackendBucketCdnPolicy(proto.Message): cached response. The maximum allowed value is 604800 (1 week). Set this to zero (0) to disable serve-while-stale. + + This field is a member of `oneof`_ ``_serve_while_stale``. signed_url_cache_max_age_sec (int): Maximum number of seconds the response to a signed URL request will be considered fresh. After this time period, @@ -6754,6 +7587,8 @@ class BackendBucketCdnPolicy(proto.Message): max-age=[TTL]" header, regardless of any existing Cache-Control header. The actual headers served in responses will not be altered. + + This field is a member of `oneof`_ ``_signed_url_cache_max_age_sec``. signed_url_key_names (Sequence[str]): [Output Only] Names of the keys for signing request URLs. """ @@ -6814,6 +7649,8 @@ class BackendBucketCdnPolicyBypassCacheOnRequestHeader(proto.Message): header_name (str): The header field name to match on when bypassing cache. Values are case-insensitive. + + This field is a member of `oneof`_ ``_header_name``. """ header_name = proto.Field(proto.STRING, number=110223613, optional=True,) @@ -6821,6 +7658,7 @@ class BackendBucketCdnPolicyBypassCacheOnRequestHeader(proto.Message): class BackendBucketCdnPolicyNegativeCachingPolicy(proto.Message): r"""Specify CDN TTLs for response error codes. + Attributes: code (int): The HTTP status code to define a TTL against. @@ -6828,12 +7666,16 @@ class BackendBucketCdnPolicyNegativeCachingPolicy(proto.Message): 404, 405, 410, 421, 451 and 501 are can be specified as values, and you cannot specify a status code more than once. + + This field is a member of `oneof`_ ``_code``. ttl (int): The TTL (in seconds) for which to cache responses with the corresponding status code. 
The maximum allowed value is 1800s (30 minutes), noting that infrequently accessed objects may be evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_ttl``. """ code = proto.Field(proto.INT32, number=3059181, optional=True,) @@ -6842,14 +7684,19 @@ class BackendBucketCdnPolicyNegativeCachingPolicy(proto.Message): class BackendBucketList(proto.Message): r"""Contains a list of BackendBucket resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.BackendBucket]): A list of BackendBucket resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -6857,10 +7704,16 @@ class BackendBucketList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -6905,6 +7758,8 @@ class BackendService(proto.Message): Not supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_affinity_cookie_ttl_sec``. backends (Sequence[google.cloud.compute_v1.types.Backend]): The list of backends that serve this BackendService. @@ -6912,10 +7767,14 @@ class BackendService(proto.Message): Cloud CDN configuration for this BackendService. Only available for specified load balancer types. + + This field is a member of `oneof`_ ``_cdn_policy``. circuit_breakers (google.cloud.compute_v1.types.CircuitBreakers): + This field is a member of `oneof`_ ``_circuit_breakers``. connection_draining (google.cloud.compute_v1.types.ConnectionDraining): + This field is a member of `oneof`_ ``_connection_draining``. consistent_hash (google.cloud.compute_v1.types.ConsistentHashLoadBalancerSettings): Consistent Hash-based load balancing can be used to provide soft session affinity based on HTTP headers, cookies or @@ -6933,8 +7792,12 @@ class BackendService(proto.Message): supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_consistent_hash``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. custom_request_headers (Sequence[str]): Headers that the load balancer adds to proxied requests. See `Creating custom @@ -6947,9 +7810,13 @@ class BackendService(proto.Message): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. enable_c_d_n (bool): If true, enables Cloud CDN for the backend service of an external HTTP(S) load balancer. + + This field is a member of `oneof`_ ``_enable_c_d_n``. 
failover_policy (google.cloud.compute_v1.types.BackendServiceFailoverPolicy): Requires at least one backend instance group to be defined as a backup (failover) backend. For load balancers that have @@ -6957,6 +7824,8 @@ class BackendService(proto.Message): Balancing `__ and `external TCP/UDP Load Balancing `__. + + This field is a member of `oneof`_ ``_failover_policy``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -6967,6 +7836,8 @@ class BackendService(proto.Message): will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a BackendService. + + This field is a member of `oneof`_ ``_fingerprint``. health_checks (Sequence[str]): The list of URLs to the healthChecks, httpHealthChecks (legacy), or httpsHealthChecks @@ -6984,17 +7855,25 @@ class BackendService(proto.Message): on this resource. Not available for Internal TCP/UDP Load Balancing and Network Load Balancing. + + This field is a member of `oneof`_ ``_iap``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#backendService for backend services. + + This field is a member of `oneof`_ ``_kind``. load_balancing_scheme (google.cloud.compute_v1.types.BackendService.LoadBalancingScheme): Specifies the load balancer type. A backend service created for one type of load balancer cannot be used with another. For more information, refer to Choosing a load balancer. + + This field is a member of `oneof`_ ``_load_balancing_scheme``. locality_lb_policy (google.cloud.compute_v1.types.BackendService.LocalityLbPolicy): The load balancing algorithm used within the scope of the locality. The possible values are: - ROUND_ROBIN: This is a @@ -7026,11 +7905,15 @@ class BackendService(proto.Message): ROUND_ROBIN policy is supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_locality_lb_policy``. log_config (google.cloud.compute_v1.types.BackendServiceLogConfig): This field denotes the logging options for the load balancer traffic served by this backend service. If logging is enabled, logs will be exported to Stackdriver. + + This field is a member of `oneof`_ ``_log_config``. max_stream_duration (google.cloud.compute_v1.types.Duration): Specifies the default maximum duration (timeout) for streams to this service. Duration is computed from the beginning of @@ -7042,6 +7925,8 @@ class BackendService(proto.Message): the UrlMap that references this backend service. This field is only allowed when the loadBalancingScheme of the backend service is INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_max_stream_duration``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -7051,11 +7936,15 @@ class BackendService(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network (str): The URL of the network to which this backend service belongs. This field can only be specified when the load balancing scheme is set to INTERNAL. + + This field is a member of `oneof`_ ``_network``. 
outlier_detection (google.cloud.compute_v1.types.OutlierDetection): Settings controlling the eviction of unhealthy hosts from the load balancing pool for the backend service. If not set, @@ -7067,11 +7956,15 @@ class BackendService(proto.Message): INTERNAL_SELF_MANAGED. Not supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_outlier_detection``. port (int): Deprecated in favor of portName. The TCP port to connect on the backend. The default value is 80. For Internal TCP/UDP Load Balancing and Network Load Balancing, omit port. + + This field is a member of `oneof`_ ``_port``. port_name (str): A named port on a backend instance group representing the port for communication to the backend VMs in that group. The @@ -7080,6 +7973,8 @@ class BackendService(proto.Message): This parameter has no meaning if the backends are NEGs. For Internal TCP/UDP Load Balancing and Network Load Balancing, omit port_name. + + This field is a member of `oneof`_ ``_port_name``. protocol (google.cloud.compute_v1.types.BackendService.Protocol): The protocol this BackendService uses to communicate with backends. Possible values are @@ -7091,15 +7986,21 @@ class BackendService(proto.Message): set to GRPC when the backend service is referenced by a URL map that is bound to target gRPC proxy. + + This field is a member of `oneof`_ ``_protocol``. region (str): [Output Only] URL of the region where the regional backend service resides. This field is not applicable to global backend services. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. security_policy (str): [Output Only] The resource URL for the security policy associated with this backend service. + + This field is a member of `oneof`_ ``_security_policy``. security_settings (google.cloud.compute_v1.types.SecuritySettings): This field specifies the security policy that applies to this backend service. This field is applicable to either: - @@ -7107,8 +8008,12 @@ class BackendService(proto.Message): HTTP, HTTPS, or HTTP2, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the load_balancing_scheme set to INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_security_settings``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. session_affinity (google.cloud.compute_v1.types.BackendService.SessionAffinity): Type of session affinity to use. The default is NONE. For a detailed description of session affinity options, see: @@ -7117,13 +8022,18 @@ class BackendService(proto.Message): Not supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_session_affinity``. subsetting (google.cloud.compute_v1.types.Subsetting): + This field is a member of `oneof`_ ``_subsetting``. timeout_sec (int): Not supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. Instead, use maxStreamDuration. + + This field is a member of `oneof`_ ``_timeout_sec``. 
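A hedged sketch of how several of the BackendService fields above combine in practice; the project, instance group, and health check URLs are placeholders, not values taken from this diff::

    from google.cloud import compute_v1

    backend_service = compute_v1.BackendService(
        name="web-backend-service",
        protocol="HTTP",                    # one of the BackendService.Protocol values
        port_name="http",
        timeout_sec=30,
        session_affinity="NONE",
        load_balancing_scheme="EXTERNAL",
        health_checks=[
            # placeholder health check URL
            "https://www.googleapis.com/compute/v1/projects/example-project/global/healthChecks/http-basic-check"
        ],
        backends=[
            compute_v1.Backend(
                # placeholder instance group URL
                group="https://www.googleapis.com/compute/v1/projects/example-project/zones/us-central1-a/instanceGroups/web-ig",
                max_utilization=0.8,        # target capacity for UTILIZATION balancing mode
            )
        ],
        log_config=compute_v1.BackendServiceLogConfig(enable=True, sample_rate=1.0),
    )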
""" class LoadBalancingScheme(proto.Enum): @@ -7290,15 +8200,20 @@ class SessionAffinity(proto.Enum): class BackendServiceAggregatedList(proto.Message): r"""Contains a list of BackendServicesScopedList. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.BackendServiceAggregatedList.ItemsEntry]): A list of BackendServicesScopedList resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -7306,12 +8221,18 @@ class BackendServiceAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -7347,6 +8268,8 @@ class BackendServiceCdnPolicy(proto.Message): cdnPolicy.cacheMode settings. cache_key_policy (google.cloud.compute_v1.types.CacheKeyPolicy): The CacheKeyPolicy for this CdnPolicy. + + This field is a member of `oneof`_ ``_cache_key_policy``. cache_mode (google.cloud.compute_v1.types.BackendServiceCdnPolicy.CacheMode): Specifies the cache setting for all responses from this backend. The possible values are: USE_ORIGIN_HEADERS @@ -7364,6 +8287,8 @@ class BackendServiceCdnPolicy(proto.Message): and CSS). Requests and responses that are marked as uncacheable, as well as dynamic content (including HTML), will not be cached. + + This field is a member of `oneof`_ ``_cache_mode``. client_ttl (int): Specifies a separate client (e.g. browser client) maximum TTL. This is used to clamp the max-age (or Expires) value @@ -7377,6 +8302,8 @@ class BackendServiceCdnPolicy(proto.Message): directive is present. If a client TTL is not specified, a default value (1 hour) will be used. The maximum allowed value is 86400s (1 day). + + This field is a member of `oneof`_ ``_client_ttl``. default_ttl (int): Specifies the default TTL for cached content served by this origin for responses that do not have an existing valid TTL @@ -7388,6 +8315,8 @@ class BackendServiceCdnPolicy(proto.Message): value is 31,622,400s (1 year), noting that infrequently accessed objects may be evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_default_ttl``. max_ttl (int): Specifies the maximum allowed TTL for cached content served by this origin. Cache directives @@ -7401,6 +8330,8 @@ class BackendServiceCdnPolicy(proto.Message): maximum allowed value is 31,622,400s (1 year), noting that infrequently accessed objects may be evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_max_ttl``. negative_caching (bool): Negative caching allows per-status code TTLs to be set, in order to apply fine-grained caching for common errors or @@ -7419,6 +8350,8 @@ class BackendServiceCdnPolicy(proto.Message): Reasons): 120s HTTP 405 (Method Not Found), 421 (Misdirected Request), 501 (Not Implemented): 60s. 
These defaults can be overridden in negative_caching_policy. + + This field is a member of `oneof`_ ``_negative_caching``. negative_caching_policy (Sequence[google.cloud.compute_v1.types.BackendServiceCdnPolicyNegativeCachingPolicy]): Sets a cache TTL for the specified HTTP status code. negative_caching must be enabled to configure @@ -7433,6 +8366,8 @@ class BackendServiceCdnPolicy(proto.Message): If true then Cloud CDN will combine multiple concurrent cache fill requests into a small number of requests to the origin. + + This field is a member of `oneof`_ ``_request_coalescing``. serve_while_stale (int): Serve existing content from the cache (if available) when revalidating content with the @@ -7448,6 +8383,8 @@ class BackendServiceCdnPolicy(proto.Message): cached response. The maximum allowed value is 604800 (1 week). Set this to zero (0) to disable serve-while-stale. + + This field is a member of `oneof`_ ``_serve_while_stale``. signed_url_cache_max_age_sec (int): Maximum number of seconds the response to a signed URL request will be considered fresh. After this time period, @@ -7458,6 +8395,8 @@ class BackendServiceCdnPolicy(proto.Message): max-age=[TTL]" header, regardless of any existing Cache-Control header. The actual headers served in responses will not be altered. + + This field is a member of `oneof`_ ``_signed_url_cache_max_age_sec``. signed_url_key_names (Sequence[str]): [Output Only] Names of the keys for signing request URLs. """ @@ -7521,6 +8460,8 @@ class BackendServiceCdnPolicyBypassCacheOnRequestHeader(proto.Message): header_name (str): The header field name to match on when bypassing cache. Values are case-insensitive. + + This field is a member of `oneof`_ ``_header_name``. """ header_name = proto.Field(proto.STRING, number=110223613, optional=True,) @@ -7528,6 +8469,7 @@ class BackendServiceCdnPolicyBypassCacheOnRequestHeader(proto.Message): class BackendServiceCdnPolicyNegativeCachingPolicy(proto.Message): r"""Specify CDN TTLs for response error codes. + Attributes: code (int): The HTTP status code to define a TTL against. @@ -7535,12 +8477,16 @@ class BackendServiceCdnPolicyNegativeCachingPolicy(proto.Message): 404, 405, 410, 421, 451 and 501 are can be specified as values, and you cannot specify a status code more than once. + + This field is a member of `oneof`_ ``_code``. ttl (int): The TTL (in seconds) for which to cache responses with the corresponding status code. The maximum allowed value is 1800s (30 minutes), noting that infrequently accessed objects may be evicted from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_ttl``. """ code = proto.Field(proto.INT32, number=3059181, optional=True,) @@ -7565,6 +8511,8 @@ class BackendServiceFailoverPolicy(proto.Message): disable_connection_drain_on_failover (bool): This can be set to true only if the protocol is TCP. The default is false. + + This field is a member of `oneof`_ ``_disable_connection_drain_on_failover``. drop_traffic_if_unhealthy (bool): If set to true, connections to the load balancer are dropped when all primary and all backup backend VMs are unhealthy.If @@ -7576,6 +8524,8 @@ class BackendServiceFailoverPolicy(proto.Message): and `external TCP/UDP Load Balancing `__. The default is false. + + This field is a member of `oneof`_ ``_drop_traffic_if_unhealthy``. failover_ratio (float): The value of the field must be in the range [0, 1]. 
If the value is 0, the load balancer performs a failover when the @@ -7587,6 +8537,8 @@ class BackendServiceFailoverPolicy(proto.Message): Balancing `__ and `external TCP/UDP Load Balancing `__. + + This field is a member of `oneof`_ ``_failover_ratio``. """ disable_connection_drain_on_failover = proto.Field( @@ -7600,6 +8552,7 @@ class BackendServiceFailoverPolicy(proto.Message): class BackendServiceGroupHealth(proto.Message): r""" + Attributes: annotations (Sequence[google.cloud.compute_v1.types.BackendServiceGroupHealth.AnnotationsEntry]): Metadata defined as annotations on the @@ -7613,6 +8566,8 @@ class BackendServiceGroupHealth(proto.Message): [Output Only] Type of resource. Always compute#backendServiceGroupHealth for the health of backend services. + + This field is a member of `oneof`_ ``_kind``. """ annotations = proto.MapField(proto.STRING, proto.STRING, number=112032548,) @@ -7624,24 +8579,33 @@ class BackendServiceGroupHealth(proto.Message): class BackendServiceIAP(proto.Message): r"""Identity-Aware Proxy + Attributes: enabled (bool): Whether the serving infrastructure will authenticate and authorize all incoming requests. If true, the oauth2ClientId and oauth2ClientSecret fields must be non-empty. + + This field is a member of `oneof`_ ``_enabled``. oauth2_client_id (str): OAuth2 client ID to use for the authentication flow. + + This field is a member of `oneof`_ ``_oauth2_client_id``. oauth2_client_secret (str): OAuth2 client secret to use for the authentication flow. For security reasons, this value cannot be retrieved via the API. Instead, the SHA-256 hash of the value is returned in the oauth2ClientSecretSha256 field. @InputOnly + + This field is a member of `oneof`_ ``_oauth2_client_secret``. oauth2_client_secret_sha256 (str): [Output Only] SHA256 hash value for the field oauth2_client_secret above. + + This field is a member of `oneof`_ ``_oauth2_client_secret_sha256``. """ enabled = proto.Field(proto.BOOL, number=1018689, optional=True,) @@ -7654,15 +8618,20 @@ class BackendServiceIAP(proto.Message): class BackendServiceList(proto.Message): r"""Contains a list of BackendService resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.BackendService]): A list of BackendService resources. kind (str): [Output Only] Type of resource. Always compute#backendServiceList for lists of backend services. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -7670,10 +8639,16 @@ class BackendServiceList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -7701,6 +8676,8 @@ class BackendServiceLogConfig(proto.Message): This field denotes whether to enable logging for the load balancer traffic served by this backend service. + + This field is a member of `oneof`_ ``_enable``. 
sample_rate (float): This field can only be specified if logging is enabled for this backend service. The value of the field must be in [0, @@ -7708,6 +8685,8 @@ class BackendServiceLogConfig(proto.Message): load balancer where 1.0 means all logged requests are reported and 0.0 means no logged requests are reported. The default value is 1.0. + + This field is a member of `oneof`_ ``_sample_rate``. """ enable = proto.Field(proto.BOOL, number=311764355, optional=True,) @@ -7716,9 +8695,11 @@ class BackendServiceLogConfig(proto.Message): class BackendServiceReference(proto.Message): r""" + Attributes: backend_service (str): + This field is a member of `oneof`_ ``_backend_service``. """ backend_service = proto.Field(proto.STRING, number=306946058, optional=True,) @@ -7726,6 +8707,7 @@ class BackendServiceReference(proto.Message): class BackendServicesScopedList(proto.Message): r""" + Attributes: backend_services (Sequence[google.cloud.compute_v1.types.BackendService]): A list of BackendServices contained in this @@ -7733,6 +8715,8 @@ class BackendServicesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ backend_services = proto.RepeatedField( @@ -7745,10 +8729,13 @@ class BackendServicesScopedList(proto.Message): class Binding(proto.Message): r"""Associates ``members`` with a ``role``. + Attributes: binding_id (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_binding_id``. condition (google.cloud.compute_v1.types.Expr): The condition that is associated with this binding. If the condition evaluates to ``true``, then this binding applies @@ -7759,6 +8746,8 @@ class Binding(proto.Message): learn which resources support conditions in their IAM policies, see the `IAM documentation `__. + + This field is a member of `oneof`_ ``_condition``. members (Sequence[str]): Specifies the identities requesting access for a Cloud Platform resource. ``members`` can have the following @@ -7799,6 +8788,8 @@ class Binding(proto.Message): role (str): Role that is assigned to ``members``. For example, ``roles/viewer``, ``roles/editor``, or ``roles/owner``. + + This field is a member of `oneof`_ ``_role``. """ binding_id = proto.Field(proto.STRING, number=441088277, optional=True,) @@ -7834,6 +8825,8 @@ class BulkInsertInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -7854,18 +8847,26 @@ class BulkInsertInstanceResource(proto.Message): Attributes: count (int): The maximum number of instances to create. + + This field is a member of `oneof`_ ``_count``. instance_properties (google.cloud.compute_v1.types.InstanceProperties): The instance properties defining the VM instances to be created. Required if sourceInstanceTemplate is not provided. + + This field is a member of `oneof`_ ``_instance_properties``. location_policy (google.cloud.compute_v1.types.LocationPolicy): Policy for chosing target zone. + + This field is a member of `oneof`_ ``_location_policy``. min_count (int): The minimum number of instances to create. If no min_count is specified then count is used as the default value. 
If min_count instances cannot be created, then no instances will be created and instances already created will be deleted. + + This field is a member of `oneof`_ ``_min_count``. name_pattern (str): The string pattern used for the names of the VMs. Either name_pattern or per_instance_properties must be set. The @@ -7880,6 +8881,8 @@ class BulkInsertInstanceResource(proto.Message): with name inst-0050, then instance names generated using the pattern inst-#### begin with inst-0051. The name pattern placeholder #...# can contain up to 18 characters. + + This field is a member of `oneof`_ ``_name_pattern``. per_instance_properties (Sequence[google.cloud.compute_v1.types.BulkInsertInstanceResource.PerInstancePropertiesEntry]): Per-instance properties to be set on individual instances. Keys of this map specify requested instance names. Can be @@ -7899,6 +8902,8 @@ class BulkInsertInstanceResource(proto.Message): projects/project/global/instanceTemplates/instanceTemplate - global/instanceTemplates/instanceTemplate This field is optional. + + This field is a member of `oneof`_ ``_source_instance_template``. """ count = proto.Field(proto.INT64, number=94851343, optional=True,) @@ -7929,6 +8934,8 @@ class BulkInsertInstanceResourcePerInstanceProperties(proto.Message): name (str): This field is only temporary. It will be removed. Do not use it. + + This field is a member of `oneof`_ ``_name``. """ name = proto.Field(proto.STRING, number=3373707, optional=True,) @@ -7961,6 +8968,8 @@ class BulkInsertRegionInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ bulk_insert_instance_resource_resource = proto.Field( @@ -7973,13 +8982,17 @@ class BulkInsertRegionInstanceRequest(proto.Message): class CacheInvalidationRule(proto.Message): r""" + Attributes: host (str): If set, this invalidation rule will only apply to requests with a Host header matching host. + + This field is a member of `oneof`_ ``_host``. path (str): + This field is a member of `oneof`_ ``_path``. """ host = proto.Field(proto.STRING, number=3208616, optional=True,) @@ -7994,15 +9007,21 @@ class CacheKeyPolicy(proto.Message): include_host (bool): If true, requests to different hosts will be cached separately. + + This field is a member of `oneof`_ ``_include_host``. include_protocol (bool): If true, http and https requests will be cached separately. + + This field is a member of `oneof`_ ``_include_protocol``. include_query_string (bool): If true, include query string parameters in the cache key according to query_string_whitelist and query_string_blacklist. If neither is set, the entire query string will be included. If false, the query string will be excluded from the cache key entirely. + + This field is a member of `oneof`_ ``_include_query_string``. query_string_blacklist (Sequence[str]): Names of query string parameters to exclude in cache keys. All other parameters will be included. Either specify @@ -8034,25 +9053,35 @@ class CircuitBreakers(proto.Message): referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_max_connections``. max_pending_requests (int): Not supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_max_pending_requests``. 
max_requests (int): The maximum number of parallel requests that allowed to the backend service. If not specified, there is no limit. + + This field is a member of `oneof`_ ``_max_requests``. max_requests_per_connection (int): Not supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_max_requests_per_connection``. max_retries (int): Not supported when the backend service is referenced by a URL map that is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_max_retries``. """ max_connections = proto.Field(proto.INT32, number=110652154, optional=True,) @@ -8087,8 +9116,12 @@ class CloneRulesFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. source_firewall_policy (str): The firewall policy from which to copy rules. + + This field is a member of `oneof`_ ``_source_firewall_policy``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -8114,23 +9147,37 @@ class Commitment(proto.Message): listed in licenseResources. Note that only MACHINE commitments should have a Type specified. + + This field is a member of `oneof`_ ``_category``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. end_timestamp (str): [Output Only] Commitment end time in RFC3339 text format. + + This field is a member of `oneof`_ ``_end_timestamp``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#commitment for commitments. + + This field is a member of `oneof`_ ``_kind``. license_resource (google.cloud.compute_v1.types.LicenseResourceCommitment): The license specification required as part of a license commitment. + + This field is a member of `oneof`_ ``_license_resource``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -8140,13 +9187,19 @@ class Commitment(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. plan (google.cloud.compute_v1.types.Commitment.Plan): The plan for this commitment, which determines duration and discount rate. The currently supported plans are TWELVE_MONTH (1 year), and THIRTY_SIX_MONTH (3 years). + + This field is a member of `oneof`_ ``_plan``. region (str): [Output Only] URL of the region where this commitment may be used. + + This field is a member of `oneof`_ ``_region``. reservations (Sequence[google.cloud.compute_v1.types.Reservation]): List of reservations in this commitment. resources (Sequence[google.cloud.compute_v1.types.ResourceCommitment]): @@ -8155,22 +9208,32 @@ class Commitment(proto.Message): commitments must occur together. self_link (str): [Output Only] Server-defined URL for the resource. 
+ + This field is a member of `oneof`_ ``_self_link``. start_timestamp (str): [Output Only] Commitment start time in RFC3339 text format. + + This field is a member of `oneof`_ ``_start_timestamp``. status (google.cloud.compute_v1.types.Commitment.Status): [Output Only] Status of the commitment with regards to eventual expiration (each commitment has an end date defined). One of the following values: NOT_YET_ACTIVE, ACTIVE, EXPIRED. + + This field is a member of `oneof`_ ``_status``. status_message (str): [Output Only] An optional, human-readable explanation of the status. + + This field is a member of `oneof`_ ``_status_message``. type_ (google.cloud.compute_v1.types.Commitment.Type): The type of commitment, which affects the discount rate and the eligible resources. Type MEMORY_OPTIMIZED specifies a commitment that will only apply to memory optimized machines. Type ACCELERATOR_OPTIMIZED specifies a commitment that will only apply to accelerator optimized machines. + + This field is a member of `oneof`_ ``_type``. """ class Category(proto.Enum): @@ -8254,16 +9317,21 @@ class Type(proto.Enum): class CommitmentAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.CommitmentAggregatedList.ItemsEntry]): A list of CommitmentsScopedList resources. kind (str): [Output Only] Type of resource. Always compute#commitmentAggregatedList for aggregated lists of commitments. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -8271,12 +9339,18 @@ class CommitmentAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -8298,15 +9372,20 @@ def raw_page(self): class CommitmentList(proto.Message): r"""Contains a list of Commitment resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Commitment]): A list of Commitment resources. kind (str): [Output Only] Type of resource. Always compute#commitmentList for lists of commitments. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -8314,10 +9393,16 @@ class CommitmentList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. 
+ + This field is a member of `oneof`_ ``_warning``. """ @property @@ -8336,12 +9421,15 @@ def raw_page(self): class CommitmentsScopedList(proto.Message): r""" + Attributes: commitments (Sequence[google.cloud.compute_v1.types.Commitment]): [Output Only] A list of commitments contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of commitments when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ commitments = proto.RepeatedField( @@ -8354,19 +9442,28 @@ class CommitmentsScopedList(proto.Message): class Condition(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: iam (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_iam``. op (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_op``. svc (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_svc``. sys (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_sys``. values (Sequence[str]): This is deprecated and has no effect. Do not use. @@ -8381,10 +9478,13 @@ class Condition(proto.Message): class ConfidentialInstanceConfig(proto.Message): r"""A set of Confidential Instance options. + Attributes: enable_confidential_compute (bool): Defines whether the instance should have confidential compute enabled. + + This field is a member of `oneof`_ ``_enable_confidential_compute``. """ enable_confidential_compute = proto.Field( @@ -8394,12 +9494,15 @@ class ConfidentialInstanceConfig(proto.Message): class ConnectionDraining(proto.Message): r"""Message containing connection draining configuration. + Attributes: draining_timeout_sec (int): Configures a duration timeout for existing requests on a removed backend instance. For supported load balancers and protocols, as described in Enabling connection draining. + + This field is a member of `oneof`_ ``_draining_timeout_sec``. """ draining_timeout_sec = proto.Field(proto.INT32, number=225127070, optional=True,) @@ -8416,10 +9519,14 @@ class ConsistentHashLoadBalancerSettings(proto.Message): hash load balancer. If the cookie is not present, it will be generated. This field is applicable if the sessionAffinity is set to HTTP_COOKIE. + + This field is a member of `oneof`_ ``_http_cookie``. http_header_name (str): The hash based on the value of the specified header field. This field is applicable if the sessionAffinity is set to HEADER_FIELD. + + This field is a member of `oneof`_ ``_http_header_name``. minimum_ring_size (int): The minimum number of virtual nodes to use for the hash ring. Defaults to 1024. Larger ring @@ -8428,6 +9535,8 @@ class ConsistentHashLoadBalancerSettings(proto.Message): load balancing pool is larger than the ring size, each host will be assigned a single virtual node. + + This field is a member of `oneof`_ ``_minimum_ring_size``. """ http_cookie = proto.Field( @@ -8448,10 +9557,16 @@ class ConsistentHashLoadBalancerSettingsHttpCookie(proto.Message): Attributes: name (str): Name of the cookie. + + This field is a member of `oneof`_ ``_name``. path (str): Path to set for the cookie. + + This field is a member of `oneof`_ ``_path``. ttl (google.cloud.compute_v1.types.Duration): Lifetime of the cookie. + + This field is a member of `oneof`_ ``_ttl``. 
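A brief sketch of cookie-based consistent hashing built from the two messages above; the cookie name is a placeholder and the one-hour TTL is only an example::

    from google.cloud import compute_v1

    consistent_hash = compute_v1.ConsistentHashLoadBalancerSettings(
        http_cookie=compute_v1.ConsistentHashLoadBalancerSettingsHttpCookie(
            name="session-cookie",
            path="/",
            ttl=compute_v1.Duration(seconds=3600),  # cookie lifetime of one hour
        ),
        minimum_ring_size=1024,  # documented default ring size
    )

    # Assumed usage: attach this to a BackendService whose session_affinity is
    # set to HTTP_COOKIE, as described in the ConsistentHashLoadBalancerSettings
    # docstring above.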
""" name = proto.Field(proto.STRING, number=3373707, optional=True,) @@ -8471,6 +9586,8 @@ class CorsPolicy(proto.Message): can include user credentials. This translates to the Access-Control-Allow-Credentials header. Default is false. + + This field is a member of `oneof`_ ``_allow_credentials``. allow_headers (Sequence[str]): Specifies the content for the Access-Control- llow-Headers header. @@ -8493,6 +9610,8 @@ class CorsPolicy(proto.Message): If true, specifies the CORS policy is disabled. The default value of false, which indicates that the CORS policy is in effect. + + This field is a member of `oneof`_ ``_disabled``. expose_headers (Sequence[str]): Specifies the content for the Access-Control- xpose-Headers header. @@ -8500,6 +9619,8 @@ class CorsPolicy(proto.Message): Specifies how long results of a preflight request can be cached in seconds. This translates to the Access-Control-Max-Age header. + + This field is a member of `oneof`_ ``_max_age``. """ allow_credentials = proto.Field(proto.BOOL, number=481263366, optional=True,) @@ -8539,6 +9660,8 @@ class CreateInstancesInstanceGroupManagerRequest(proto.Message): with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. It should conform to @@ -8588,6 +9711,8 @@ class CreateInstancesRegionInstanceGroupManagerRequest(proto.Message): with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -8613,6 +9738,8 @@ class CreateSnapshotDiskRequest(proto.Message): snapshot by informing the OS to prepare for the snapshot process. Currently only supported on Windows instances using the Volume Shadow Copy Service (VSS). + + This field is a member of `oneof`_ ``_guest_flush``. project (str): Project ID for this request. request_id (str): @@ -8631,6 +9758,8 @@ class CreateSnapshotDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. snapshot_resource (google.cloud.compute_v1.types.Snapshot): The body resource for this request zone (str): @@ -8675,6 +9804,8 @@ class CreateSnapshotRegionDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. snapshot_resource (google.cloud.compute_v1.types.Snapshot): The body resource for this request """ @@ -8690,18 +9821,23 @@ class CreateSnapshotRegionDiskRequest(proto.Message): class CustomerEncryptionKey(proto.Message): r""" + Attributes: kms_key_name (str): The name of the encryption key that is stored in Google Cloud KMS. For example: "kmsKeyName": "projects/kms_project_id/locations/region/keyRings/ key_region/cryptoKeys/key + + This field is a member of `oneof`_ ``_kms_key_name``. kms_key_service_account (str): The service account being used for the encryption request for the given KMS key. If absent, the Compute Engine default service account is used. For example: "kmsKeyServiceAccount": "name@project_id.iam.gserviceaccount.com/ + + This field is a member of `oneof`_ ``_kms_key_service_account``. 
raw_key (str): Specifies a 256-bit customer-supplied encryption key, encoded in RFC 4648 base64 to @@ -8709,6 +9845,8 @@ class CustomerEncryptionKey(proto.Message): provide either the rawKey or the rsaEncryptedKey. For example: "rawKey": "SGVsbG8gZnJvbSBHb29nbGUgQ2xvdWQgUGxhdGZvcm0=". + + This field is a member of `oneof`_ ``_raw_key``. rsa_encrypted_key (str): Specifies an RFC 4648 base64 encoded, RSA- rapped 2048-bit customer-supplied encryption key @@ -8728,10 +9866,14 @@ class CustomerEncryptionKey(proto.Message): https://cloud- certs.storage.googleapis.com/google-cloud-csek- ingress.pem + + This field is a member of `oneof`_ ``_rsa_encrypted_key``. sha256 (str): [Output only] The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied encryption key that protects this resource. + + This field is a member of `oneof`_ ``_sha256``. """ kms_key_name = proto.Field(proto.STRING, number=484373913, optional=True,) @@ -8745,15 +9887,20 @@ class CustomerEncryptionKey(proto.Message): class CustomerEncryptionKeyProtectedDisk(proto.Message): r""" + Attributes: disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): Decrypts data associated with the disk with a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_disk_encryption_key``. source (str): Specifies a valid partial or full URL to an existing Persistent Disk resource. This field is only applicable for persistent disks. For example: "source": "/compute/v1/projects/project_id/zones/zone/disks/ disk_name + + This field is a member of `oneof`_ ``_source``. """ disk_encryption_key = proto.Field( @@ -8764,6 +9911,7 @@ class CustomerEncryptionKeyProtectedDisk(proto.Message): class Data(proto.Message): r""" + Attributes: key (str): [Output Only] A key that provides more detail on the warning @@ -8775,8 +9923,12 @@ class Data(proto.Message): invalid network settings (for example, if an instance attempts to perform IP forwarding but is not enabled for IP forwarding). + + This field is a member of `oneof`_ ``_key``. value (str): [Output Only] A warning data value corresponding to the key. + + This field is a member of `oneof`_ ``_value``. """ key = proto.Field(proto.STRING, number=106079, optional=True,) @@ -8812,6 +9964,8 @@ class DeleteAccessConfigInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -8851,6 +10005,8 @@ class DeleteAddressRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ address = proto.Field(proto.STRING, number=462920692,) @@ -8884,6 +10040,8 @@ class DeleteAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): Name of the zone for this request. """ @@ -8919,6 +10077,8 @@ class DeleteBackendBucketRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
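A sketch of how this request message is typically passed to the corresponding client; the project and resource names are placeholders, and the request_id merely illustrates the idempotency token described above::

    import uuid

    from google.cloud import compute_v1

    client = compute_v1.BackendBucketsClient()
    operation = client.delete(
        request=compute_v1.DeleteBackendBucketRequest(
            project="example-project",
            backend_bucket="cdn-backend-bucket",
            request_id=str(uuid.uuid4()),  # safe to retry the call with the same ID
        )
    )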
""" backend_bucket = proto.Field(proto.STRING, number=91714037,) @@ -8952,6 +10112,8 @@ class DeleteBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service = proto.Field(proto.STRING, number=306946058,) @@ -8984,6 +10146,8 @@ class DeleteDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -9019,6 +10183,8 @@ class DeleteExternalVpnGatewayRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ external_vpn_gateway = proto.Field(proto.STRING, number=109898629,) @@ -9049,6 +10215,8 @@ class DeleteFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -9080,6 +10248,8 @@ class DeleteFirewallRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall = proto.Field(proto.STRING, number=511016192,) @@ -9115,6 +10285,8 @@ class DeleteForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ forwarding_rule = proto.Field(proto.STRING, number=269964030,) @@ -9148,6 +10320,8 @@ class DeleteGlobalAddressRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ address = proto.Field(proto.STRING, number=462920692,) @@ -9181,6 +10355,8 @@ class DeleteGlobalForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ forwarding_rule = proto.Field(proto.STRING, number=269964030,) @@ -9214,6 +10390,8 @@ class DeleteGlobalNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network_endpoint_group = proto.Field(proto.STRING, number=433907078,) @@ -9239,7 +10417,8 @@ class DeleteGlobalOperationRequest(proto.Message): class DeleteGlobalOperationResponse(proto.Message): r"""A response message for GlobalOperations.Delete. See the method description for details. - """ + + """ class DeleteGlobalOrganizationOperationRequest(proto.Message): @@ -9251,6 +10430,8 @@ class DeleteGlobalOrganizationOperationRequest(proto.Message): Name of the Operations resource to delete. parent_id (str): Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. 
""" operation = proto.Field(proto.STRING, number=52090215,) @@ -9260,7 +10441,8 @@ class DeleteGlobalOrganizationOperationRequest(proto.Message): class DeleteGlobalOrganizationOperationResponse(proto.Message): r"""A response message for GlobalOrganizationOperations.Delete. See the method description for details. - """ + + """ class DeleteGlobalPublicDelegatedPrefixeRequest(proto.Message): @@ -9287,6 +10469,8 @@ class DeleteGlobalPublicDelegatedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -9319,6 +10503,8 @@ class DeleteHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check = proto.Field(proto.STRING, number=308876645,) @@ -9351,6 +10537,8 @@ class DeleteImageRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ image = proto.Field(proto.STRING, number=100313435,) @@ -9384,6 +10572,8 @@ class DeleteInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. @@ -9420,6 +10610,8 @@ class DeleteInstanceGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the instance group is located. @@ -9456,6 +10648,8 @@ class DeleteInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -9491,6 +10685,8 @@ class DeleteInstanceTemplateRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_template = proto.Field(proto.STRING, number=309248228,) @@ -9525,6 +10721,8 @@ class DeleteInstancesInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. @@ -9571,6 +10769,8 @@ class DeleteInstancesRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -9612,6 +10812,8 @@ class DeleteInterconnectAttachmentRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
""" interconnect_attachment = proto.Field(proto.STRING, number=308135284,) @@ -9645,6 +10847,8 @@ class DeleteInterconnectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ interconnect = proto.Field(proto.STRING, number=224601230,) @@ -9677,6 +10881,8 @@ class DeleteLicenseRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ license_ = proto.Field(proto.STRING, number=166757441,) @@ -9710,6 +10916,8 @@ class DeleteNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the network endpoint group is located. It should comply with @@ -9747,6 +10955,8 @@ class DeleteNetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network = proto.Field(proto.STRING, number=232872494,) @@ -9779,6 +10989,8 @@ class DeleteNodeGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -9816,6 +11028,8 @@ class DeleteNodeTemplateRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ node_template = proto.Field(proto.STRING, number=323154455,) @@ -9852,6 +11066,8 @@ class DeleteNodesNodeGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -9893,6 +11109,8 @@ class DeletePacketMirroringRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ packet_mirroring = proto.Field(proto.STRING, number=22305996,) @@ -9982,6 +11200,8 @@ class DeletePublicAdvertisedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -10015,6 +11235,8 @@ class DeletePublicDelegatedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -10050,6 +11272,8 @@ class DeleteRegionAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
""" autoscaler = proto.Field(proto.STRING, number=517258967,) @@ -10086,6 +11310,8 @@ class DeleteRegionBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service = proto.Field(proto.STRING, number=306946058,) @@ -10122,6 +11348,8 @@ class DeleteRegionDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ disk = proto.Field(proto.STRING, number=3083677,) @@ -10157,6 +11385,8 @@ class DeleteRegionHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check = proto.Field(proto.STRING, number=308876645,) @@ -10194,6 +11424,8 @@ class DeleteRegionHealthCheckServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check_service = proto.Field(proto.STRING, number=408374747,) @@ -10229,6 +11461,8 @@ class DeleteRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -10267,6 +11501,8 @@ class DeleteRegionNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network_endpoint_group = proto.Field(proto.STRING, number=433907078,) @@ -10301,6 +11537,8 @@ class DeleteRegionNotificationEndpointRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ notification_endpoint = proto.Field(proto.STRING, number=376807017,) @@ -10330,7 +11568,8 @@ class DeleteRegionOperationRequest(proto.Message): class DeleteRegionOperationResponse(proto.Message): r"""A response message for RegionOperations.Delete. See the method description for details. - """ + + """ class DeleteRegionSslCertificateRequest(proto.Message): @@ -10356,6 +11595,8 @@ class DeleteRegionSslCertificateRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. ssl_certificate (str): Name of the SslCertificate resource to delete. @@ -10392,6 +11633,8 @@ class DeleteRegionTargetHttpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_http_proxy (str): Name of the TargetHttpProxy resource to delete. @@ -10426,6 +11669,8 @@ class DeleteRegionTargetHttpsProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. 
target_https_proxy (str): Name of the TargetHttpsProxy resource to delete. @@ -10449,6 +11694,8 @@ class DeleteRegionUrlMapRequest(proto.Message): request_id (str): begin_interface: MixerMutationRequestBuilder Request ID to support idempotency. + + This field is a member of `oneof`_ ``_request_id``. url_map (str): Name of the UrlMap resource to delete. """ @@ -10482,6 +11729,8 @@ class DeleteReservationRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. reservation (str): Name of the reservation to delete. zone (str): @@ -10519,6 +11768,8 @@ class DeleteResourcePolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. resource_policy (str): Name of the resource policy to delete. """ @@ -10552,6 +11803,8 @@ class DeleteRouteRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. route (str): Name of the Route resource to delete. """ @@ -10586,6 +11839,8 @@ class DeleteRouterRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. router (str): Name of the Router resource to delete. """ @@ -10619,6 +11874,8 @@ class DeleteSecurityPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. security_policy (str): Name of the security policy to delete. """ @@ -10651,6 +11908,8 @@ class DeleteServiceAttachmentRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. service_attachment (str): Name of the ServiceAttachment resource to delete. @@ -10691,6 +11950,8 @@ class DeleteSignedUrlKeyBackendBucketRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_bucket = proto.Field(proto.STRING, number=91714037,) @@ -10728,6 +11989,8 @@ class DeleteSignedUrlKeyBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service = proto.Field(proto.STRING, number=306946058,) @@ -10759,6 +12022,8 @@ class DeleteSnapshotRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. snapshot (str): Name of the Snapshot resource to delete. """ @@ -10789,6 +12054,8 @@ class DeleteSslCertificateRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. ssl_certificate (str): Name of the SslCertificate resource to delete. 
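Most of the ``Delete*Request`` messages above carry the same optional ``request_id`` oneof: a client-chosen UUID that makes retries of a mutating call idempotent. A minimal sketch of supplying one when deleting a snapshot follows; the project and snapshot names are hypothetical and the call assumes application-default credentials::

    import uuid

    from google.cloud import compute_v1

    # Client-generated UUID, reused verbatim on retries; the zero UUID
    # (00000000-0000-0000-0000-000000000000) is rejected by the API.
    request_id = str(uuid.uuid4())

    client = compute_v1.SnapshotsClient()
    request = compute_v1.DeleteSnapshotRequest(
        project="my-project",      # hypothetical project ID
        snapshot="my-snapshot",    # hypothetical snapshot name
        request_id=request_id,
    )
    # Returns an Operation message describing the asynchronous delete.
    operation = client.delete(request=request)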
@@ -10822,6 +12089,8 @@ class DeleteSslPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. ssl_policy (str): Name of the SSL policy to delete. The name must be 1-63 characters long, and comply with @@ -10858,6 +12127,8 @@ class DeleteSubnetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. subnetwork (str): Name of the Subnetwork resource to delete. """ @@ -10889,6 +12160,8 @@ class DeleteTargetGrpcProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_grpc_proxy (str): Name of the TargetGrpcProxy resource to delete. @@ -10922,6 +12195,8 @@ class DeleteTargetHttpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_http_proxy (str): Name of the TargetHttpProxy resource to delete. @@ -10955,6 +12230,8 @@ class DeleteTargetHttpsProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_https_proxy (str): Name of the TargetHttpsProxy resource to delete. @@ -10988,6 +12265,8 @@ class DeleteTargetInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_instance (str): Name of the TargetInstance resource to delete. @@ -11026,6 +12305,8 @@ class DeleteTargetPoolRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_pool (str): Name of the TargetPool resource to delete. """ @@ -11059,6 +12340,8 @@ class DeleteTargetSslProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_ssl_proxy (str): Name of the TargetSslProxy resource to delete. @@ -11092,6 +12375,8 @@ class DeleteTargetTcpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_tcp_proxy (str): Name of the TargetTcpProxy resource to delete. @@ -11127,6 +12412,8 @@ class DeleteTargetVpnGatewayRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_vpn_gateway (str): Name of the target VPN gateway to delete. """ @@ -11160,6 +12447,8 @@ class DeleteUrlMapRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. url_map (str): Name of the UrlMap resource to delete. 
""" @@ -11194,6 +12483,8 @@ class DeleteVpnGatewayRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. vpn_gateway (str): Name of the VPN gateway to delete. """ @@ -11229,6 +12520,8 @@ class DeleteVpnTunnelRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. vpn_tunnel (str): Name of the VpnTunnel resource to delete. """ @@ -11260,11 +12553,13 @@ class DeleteZoneOperationRequest(proto.Message): class DeleteZoneOperationResponse(proto.Message): r"""A response message for ZoneOperations.Delete. See the method description for details. - """ + + """ class Denied(proto.Message): r""" + Attributes: I_p_protocol (str): The IP protocol to which this rule applies. @@ -11273,6 +12568,8 @@ class Denied(proto.Message): the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp) or the IP protocol number. + + This field is a member of `oneof`_ ``_I_p_protocol``. ports (Sequence[str]): An optional list of ports to which this rule applies. This field is only applicable for the UDP or TCP protocol. Each @@ -11313,6 +12610,8 @@ class DeprecateImageRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ deprecation_status_resource = proto.Field( @@ -11325,6 +12624,7 @@ class DeprecateImageRequest(proto.Message): class DeprecationStatus(proto.Message): r"""Deprecation status for a public resource. + Attributes: deleted (str): An optional RFC3339 timestamp on or after @@ -11332,23 +12632,31 @@ class DeprecationStatus(proto.Message): change to DELETED. This is only informational and the status will not change unless the client explicitly changes it. + + This field is a member of `oneof`_ ``_deleted``. deprecated (str): An optional RFC3339 timestamp on or after which the state of this resource is intended to change to DEPRECATED. This is only informational and the status will not change unless the client explicitly changes it. + + This field is a member of `oneof`_ ``_deprecated``. obsolete (str): An optional RFC3339 timestamp on or after which the state of this resource is intended to change to OBSOLETE. This is only informational and the status will not change unless the client explicitly changes it. + + This field is a member of `oneof`_ ``_obsolete``. replacement (str): The URL of the suggested replacement for a deprecated resource. The suggested replacement resource must be the same kind of resource as the deprecated resource. + + This field is a member of `oneof`_ ``_replacement``. state (google.cloud.compute_v1.types.DeprecationStatus.State): The deprecation state of this resource. This can be ACTIVE, DEPRECATED, OBSOLETE, or DELETED. @@ -11360,6 +12668,8 @@ class DeprecationStatus(proto.Message): recommending its replacement. Operations which use OBSOLETE or DELETED resources will be rejected and result in an error. + + This field is a member of `oneof`_ ``_state``. """ class State(proto.Enum): @@ -11414,6 +12724,8 @@ class DetachDiskInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
zone (str): The name of the zone for this request. """ @@ -11455,6 +12767,8 @@ class DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ global_network_endpoint_groups_detach_endpoints_request_resource = proto.Field( @@ -11497,6 +12811,8 @@ class DetachNetworkEndpointsNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the network endpoint group is located. It should comply with @@ -11537,6 +12853,8 @@ class DisableXpnHostProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -11568,6 +12886,8 @@ class DisableXpnResourceProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -11592,10 +12912,14 @@ class Disk(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): Encrypts the disk using a customer-supplied encryption key or a customer-managed encryption key. Encryption keys do not @@ -11616,6 +12940,8 @@ class Disk(proto.Message): then the disk is encrypted using an automatically generated key and you don't need to provide a key to use the disk later. + + This field is a member of `oneof`_ ``_disk_encryption_key``. guest_os_features (Sequence[google.cloud.compute_v1.types.GuestOsFeature]): A list of features to enable on the guest operating system. Applicable only for bootable @@ -11624,9 +12950,13 @@ class Disk(proto.Message): id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#disk for disks. + + This field is a member of `oneof`_ ``_kind``. label_fingerprint (str): A fingerprint for the labels being applied to this disk, which is essentially a hash of the @@ -11639,13 +12969,19 @@ class Disk(proto.Message): with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a disk. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.Disk.LabelsEntry]): Labels to apply to this disk. These can be later modified by the setLabels method. last_attach_timestamp (str): [Output Only] Last attach timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_attach_timestamp``. last_detach_timestamp (str): [Output Only] Last detach timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_detach_timestamp``. 
license_codes (Sequence[int]): Integer license codes indicating which licenses are attached to this disk. @@ -11656,6 +12992,8 @@ class Disk(proto.Message): An opaque location hint used to place the disk close to other resources. This field is for use by internal tools that use the public API. + + This field is a member of `oneof`_ ``_location_hint``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -11665,8 +13003,12 @@ class Disk(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. options (str): Internal use only. + + This field is a member of `oneof`_ ``_options``. physical_block_size_bytes (int): Physical block size of the persistent disk, in bytes. If not present in a request, a default @@ -11675,17 +13017,23 @@ class Disk(proto.Message): an unsupported value is requested, the error message will list the supported values for the caller's project. + + This field is a member of `oneof`_ ``_physical_block_size_bytes``. provisioned_iops (int): Indicates how many IOPS to provision for the disk. This sets the number of I/O operations per second that the disk can handle. Values must be between 10,000 and 120,000. For more details, see the Extreme persistent disk documentation. + + This field is a member of `oneof`_ ``_provisioned_iops``. region (str): [Output Only] URL of the region where the disk resides. Only applicable for regional resources. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. replica_zones (Sequence[str]): URLs of the zones where the disk should be replicated to. Only applicable for regional @@ -11695,9 +13043,13 @@ class Disk(proto.Message): automatic snapshot creations. satisfies_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): [Output Only] Server-defined fully-qualified URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. size_gb (int): Size, in GB, of the persistent disk. You can specify this field when creating a persistent @@ -11708,6 +13060,8 @@ class Disk(proto.Message): sizeGb must not be less than the size of the source. Acceptable values are 1 to 65536, inclusive. + + This field is a member of `oneof`_ ``_size_gb``. source_disk (str): The source disk used to create this disk. You can provide this as a partial or full URL to the @@ -11721,6 +13075,8 @@ class Disk(proto.Message): projects/project/regions/region/disks/disk - zones/zone/disks/disk - regions/region/disks/disk + + This field is a member of `oneof`_ ``_source_disk``. source_disk_id (str): [Output Only] The unique ID of the disk used to create this disk. This value identifies the exact disk that was used to @@ -11728,6 +13084,8 @@ class Disk(proto.Message): persistent disk from a disk that was later deleted and recreated under the same name, the source disk ID would identify the exact version of the disk that was used. + + This field is a member of `oneof`_ ``_source_disk_id``. source_image (str): The source image used to create this disk. If the source image is deleted, this field will not @@ -11748,10 +13106,14 @@ class Disk(proto.Message): that family. 
Replace the image name with family/family-name: global/images/family/my- image-family + + This field is a member of `oneof`_ ``_source_image``. source_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source image. Required if the source image is protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_image_encryption_key``. source_image_id (str): [Output Only] The ID value of the image used to create this disk. This value identifies the exact image that was used to @@ -11759,6 +13121,8 @@ class Disk(proto.Message): persistent disk from an image that was later deleted and recreated under the same name, the source image ID would identify the exact version of the image that was used. + + This field is a member of `oneof`_ ``_source_image_id``. source_snapshot (str): The source snapshot used to create this disk. You can provide this as a partial or full URL to @@ -11768,11 +13132,15 @@ class Disk(proto.Message): /global/snapshots/snapshot - projects/project/global/snapshots/snapshot - global/snapshots/snapshot + + This field is a member of `oneof`_ ``_source_snapshot``. source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source snapshot. Required if the source snapshot is protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. source_snapshot_id (str): [Output Only] The unique ID of the snapshot used to create this disk. This value identifies the exact snapshot that was @@ -11781,6 +13149,8 @@ class Disk(proto.Message): deleted and recreated under the same name, the source snapshot ID would identify the exact version of the snapshot that was used. + + This field is a member of `oneof`_ ``_source_snapshot_id``. source_storage_object (str): The full Google Cloud Storage URI where the disk image is stored. This file must be a gzip- @@ -11792,11 +13162,15 @@ class Disk(proto.Message): source storage object. To create many disks from a source storage object, use gcloud compute images import instead. + + This field is a member of `oneof`_ ``_source_storage_object``. status (google.cloud.compute_v1.types.Disk.Status): [Output Only] The status of disk creation. - CREATING: Disk is provisioning. - RESTORING: Source data is being copied into the disk. - FAILED: Disk creation failed. - READY: Disk is ready for use. - DELETING: Disk is deleting. + + This field is a member of `oneof`_ ``_status``. type_ (str): URL of the disk type resource describing which disk type to use to create the disk. @@ -11804,6 +13178,8 @@ class Disk(proto.Message): example: projects/project /zones/zone/diskTypes/pd-ssd . See Persistent disk types. + + This field is a member of `oneof`_ ``_type``. users (Sequence[str]): [Output Only] Links to the users of the disk (attached instances) in form: @@ -11812,6 +13188,8 @@ class Disk(proto.Message): [Output Only] URL of the zone where the disk resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. """ class Status(proto.Enum): @@ -11877,16 +13255,21 @@ class Status(proto.Enum): class DiskAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. 
items (Sequence[google.cloud.compute_v1.types.DiskAggregatedList.ItemsEntry]): A list of DisksScopedList resources. kind (str): [Output Only] Type of resource. Always compute#diskAggregatedList for aggregated lists of persistent disks. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -11894,12 +13277,18 @@ class DiskAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -11928,13 +13317,19 @@ class DiskInstantiationConfig(proto.Message): Specifies whether the disk will be auto- eleted when the instance is deleted (but not when the disk is detached from the instance). + + This field is a member of `oneof`_ ``_auto_delete``. custom_image (str): The custom source image to be used to restore this disk when instantiating this instance template. + + This field is a member of `oneof`_ ``_custom_image``. device_name (str): Specifies the device name of the disk to which the configurations apply to. + + This field is a member of `oneof`_ ``_device_name``. instantiate_from (google.cloud.compute_v1.types.DiskInstantiationConfig.InstantiateFrom): Specifies whether to include the disk and what image to use. Possible values are: - @@ -11954,6 +13349,8 @@ class DiskInstantiationConfig(proto.Message): from the template. Applicable to additional read-write disks, local SSDs, and read-only disks. + + This field is a member of `oneof`_ ``_instantiate_from``. """ class InstantiateFrom(proto.Enum): @@ -11990,15 +13387,20 @@ class InstantiateFrom(proto.Enum): class DiskList(proto.Message): r"""A list of Disk resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Disk]): A list of Disk resources. kind (str): [Output Only] Type of resource. Always compute#diskList for lists of disks. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -12006,10 +13408,16 @@ class DiskList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
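As a rough illustration of the ``Disk`` and ``DiskList`` messages documented above, the sketch below creates a zonal disk and then pages through the zone's disks. Project, zone, and disk names are hypothetical, and operation polling and error handling are omitted::

    from google.cloud import compute_v1

    project = "my-project"    # hypothetical
    zone = "us-central1-a"    # hypothetical

    disks = compute_v1.DisksClient()

    # Only a few of the Disk fields described above are set here;
    # type_ takes the partial URL form zones/zone/diskTypes/disk-type.
    disk = compute_v1.Disk(
        name="example-disk",
        size_gb=100,
        type_=f"zones/{zone}/diskTypes/pd-ssd",
    )
    operation = disks.insert(project=project, zone=zone, disk_resource=disk)

    # list() returns DiskList pages; the pager follows nextPageToken.
    for d in disks.list(project=project, zone=zone):
        print(d.name, d.size_gb, d.status)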
""" @property @@ -12028,6 +13436,7 @@ def raw_page(self): class DiskMoveRequest(proto.Message): r""" + Attributes: destination_zone (str): The URL of the destination zone to move the @@ -12036,6 +13445,8 @@ class DiskMoveRequest(proto.Message): zone: - https://www.googleapis.com/compute/v1/projects/project/zones/zone - projects/project/zones/zone - zones/zone + + This field is a member of `oneof`_ ``_destination_zone``. target_disk (str): The URL of the target disk to move. This can be a full or partial URL. For example, the @@ -12044,6 +13455,8 @@ class DiskMoveRequest(proto.Message): /disks/disk - projects/project/zones/zone/disks/disk - zones/zone/disks/disk + + This field is a member of `oneof`_ ``_target_disk``. """ destination_zone = proto.Field(proto.STRING, number=131854653, optional=True,) @@ -12065,35 +13478,57 @@ class DiskType(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. default_disk_size_gb (int): [Output Only] Server-defined default disk size in GB. + + This field is a member of `oneof`_ ``_default_disk_size_gb``. deprecated (google.cloud.compute_v1.types.DeprecationStatus): [Output Only] The deprecation status associated with this disk type. + + This field is a member of `oneof`_ ``_deprecated``. description (str): [Output Only] An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#diskType for disk types. + + This field is a member of `oneof`_ ``_kind``. name (str): [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. region (str): [Output Only] URL of the region where the disk type resides. Only applicable for regional resources. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. valid_disk_size (str): [Output Only] An optional textual description of the valid disk size, such as "10GB-10TB". + + This field is a member of `oneof`_ ``_valid_disk_size``. zone (str): [Output Only] URL of the zone where the disk type resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. """ creation_timestamp = proto.Field(proto.STRING, number=30525366, optional=True,) @@ -12113,15 +13548,20 @@ class DiskType(proto.Message): class DiskTypeAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.DiskTypeAggregatedList.ItemsEntry]): A list of DiskTypesScopedList resources. kind (str): [Output Only] Type of resource. Always compute#diskTypeAggregatedList. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -12129,12 +13569,18 @@ class DiskTypeAggregatedList(proto.Message): the query parameter pageToken in the next list request. 
Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -12156,15 +13602,20 @@ def raw_page(self): class DiskTypeList(proto.Message): r"""Contains a list of disk types. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.DiskType]): A list of DiskType resources. kind (str): [Output Only] Type of resource. Always compute#diskTypeList for disk types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -12172,10 +13623,16 @@ class DiskTypeList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -12194,12 +13651,15 @@ def raw_page(self): class DiskTypesScopedList(proto.Message): r""" + Attributes: disk_types (Sequence[google.cloud.compute_v1.types.DiskType]): [Output Only] A list of disk types contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of disk types when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ disk_types = proto.RepeatedField( @@ -12212,6 +13672,7 @@ class DiskTypesScopedList(proto.Message): class DisksAddResourcePoliciesRequest(proto.Message): r""" + Attributes: resource_policies (Sequence[str]): Full or relative path to the resource policy @@ -12224,6 +13685,7 @@ class DisksAddResourcePoliciesRequest(proto.Message): class DisksRemoveResourcePoliciesRequest(proto.Message): r""" + Attributes: resource_policies (Sequence[str]): Resource policies to be removed from this @@ -12235,10 +13697,13 @@ class DisksRemoveResourcePoliciesRequest(proto.Message): class DisksResizeRequest(proto.Message): r""" + Attributes: size_gb (int): The new size of the persistent disk, which is specified in GB. + + This field is a member of `oneof`_ ``_size_gb``. """ size_gb = proto.Field(proto.INT64, number=494929369, optional=True,) @@ -12246,12 +13711,15 @@ class DisksResizeRequest(proto.Message): class DisksScopedList(proto.Message): r""" + Attributes: disks (Sequence[google.cloud.compute_v1.types.Disk]): [Output Only] A list of disks contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of disks when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
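The ``*AggregatedList`` and ``*ScopedList`` messages in this stretch all follow one pattern: an ``items`` map keyed by scope (for example ``zones/us-central1-a``) whose values are per-scope lists, with a ``warning`` set when a scope is empty. A minimal sketch over disks, assuming a hypothetical project ID::

    from google.cloud import compute_v1

    client = compute_v1.DisksClient()

    # The pager flattens DiskAggregatedList pages into
    # (scope, DisksScopedList) pairs.
    for scope, scoped_list in client.aggregated_list(project="my-project"):
        if scoped_list.disks:      # empty scopes carry only a warning
            for disk in scoped_list.disks:
                print(scope, disk.name)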
""" disks = proto.RepeatedField(proto.MESSAGE, number=95594102, message="Disk",) @@ -12262,10 +13730,13 @@ class DisksScopedList(proto.Message): class DisplayDevice(proto.Message): r"""A set of Display Device options + Attributes: enable_display (bool): Defines whether the instance has Display enabled. + + This field is a member of `oneof`_ ``_enable_display``. """ enable_display = proto.Field(proto.BOOL, number=14266886, optional=True,) @@ -12273,12 +13744,15 @@ class DisplayDevice(proto.Message): class DistributionPolicy(proto.Message): r""" + Attributes: target_shape (google.cloud.compute_v1.types.DistributionPolicy.TargetShape): The distribution shape to which the group converges either proactively or on resize events (depending on the value set in updatePolicy.instanceRedistributionType). + + This field is a member of `oneof`_ ``_target_shape``. zones (Sequence[google.cloud.compute_v1.types.DistributionPolicyZoneConfiguration]): Zones where the regional managed instance group will create and manage its instances. @@ -12304,11 +13778,14 @@ class TargetShape(proto.Enum): class DistributionPolicyZoneConfiguration(proto.Message): r""" + Attributes: zone (str): The URL of the zone. The zone must exist in the region where the managed instance group is located. + + This field is a member of `oneof`_ ``_zone``. """ zone = proto.Field(proto.STRING, number=3744684, optional=True,) @@ -12326,11 +13803,15 @@ class Duration(proto.Message): resolution. Durations less than one second are represented with a 0 ``seconds`` field and a positive ``nanos`` field. Must be from 0 to 999,999,999 inclusive. + + This field is a member of `oneof`_ ``_nanos``. seconds (int): Span of time at a resolution of a second. Must be from 0 to 315,576,000,000 inclusive. Note: these bounds are computed from: 60 sec/min \* 60 min/hr \* 24 hr/day \* 365.25 days/year \* 10000 years + + This field is a member of `oneof`_ ``_seconds``. """ nanos = proto.Field(proto.INT32, number=104586303, optional=True,) @@ -12360,6 +13841,8 @@ class EnableXpnHostProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -12391,6 +13874,8 @@ class EnableXpnResourceProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -12415,14 +13900,21 @@ class Error(proto.Message): class Errors(proto.Message): r""" + Attributes: code (str): [Output Only] The error type identifier for this error. + + This field is a member of `oneof`_ ``_code``. location (str): [Output Only] Indicates the field in the request that caused the error. This property is optional. + + This field is a member of `oneof`_ ``_location``. message (str): [Output Only] An optional, human-readable error message. + + This field is a member of `oneof`_ ``_message``. """ code = proto.Field(proto.STRING, number=3059181, optional=True,) @@ -12432,9 +13924,12 @@ class Errors(proto.Message): class ExchangedPeeringRoute(proto.Message): r""" + Attributes: dest_range (str): The destination range of the route. + + This field is a member of `oneof`_ ``_dest_range``. imported (bool): True if the peering route has been imported from a peer. 
The actual import happens if the @@ -12443,13 +13938,21 @@ class ExchangedPeeringRoute(proto.Message): networkPeering.exportCustomRoutes is true for the peer network, and the import does not result in a route conflict. + + This field is a member of `oneof`_ ``_imported``. next_hop_region (str): The region of peering route next hop, only applies to dynamic routes. + + This field is a member of `oneof`_ ``_next_hop_region``. priority (int): The priority of the peering route. + + This field is a member of `oneof`_ ``_priority``. type_ (google.cloud.compute_v1.types.ExchangedPeeringRoute.Type): The type of the peering route. + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -12468,16 +13971,21 @@ class Type(proto.Enum): class ExchangedPeeringRoutesList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ExchangedPeeringRoute]): A list of ExchangedPeeringRoute resources. kind (str): [Output Only] Type of resource. Always compute#exchangedPeeringRoutesList for exchanged peering routes lists. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -12485,10 +13993,16 @@ class ExchangedPeeringRoutesList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -12532,6 +14046,8 @@ class ExpandIpCidrRangeSubnetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. subnetwork (str): Name of the Subnetwork resource to update. subnetworks_expand_ip_cidr_range_request_resource (google.cloud.compute_v1.types.SubnetworksExpandIpCidrRangeRequest): @@ -12572,18 +14088,26 @@ class Expr(proto.Message): Optional. Description of the expression. This is a longer text which describes the expression, e.g. when hovered over it in a UI. + + This field is a member of `oneof`_ ``_description``. expression (str): Textual representation of an expression in Common Expression Language syntax. + + This field is a member of `oneof`_ ``_expression``. location (str): Optional. String indicating the location of the expression for error reporting, e.g. a file name and a position in the file. + + This field is a member of `oneof`_ ``_location``. title (str): Optional. Title for the expression, i.e. a short string describing its purpose. This can be used e.g. in UIs which allow to enter the expression. + + This field is a member of `oneof`_ ``_title``. """ description = proto.Field(proto.STRING, number=422937596, optional=True,) @@ -12606,13 +14130,19 @@ class ExternalVpnGateway(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. 
Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. interfaces (Sequence[google.cloud.compute_v1.types.ExternalVpnGatewayInterface]): A list of interfaces for this external VPN gateway. If your peer-side gateway is an on- @@ -12625,6 +14155,8 @@ class ExternalVpnGateway(proto.Message): kind (str): [Output Only] Type of the resource. Always compute#externalVpnGateway for externalVpnGateways. + + This field is a member of `oneof`_ ``_kind``. label_fingerprint (str): A fingerprint for the labels being applied to this ExternalVpnGateway, which is essentially a @@ -12638,6 +14170,8 @@ class ExternalVpnGateway(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve an ExternalVpnGateway. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.ExternalVpnGateway.LabelsEntry]): Labels for this resource. These can only be added or modified by the setLabels method. Each @@ -12652,11 +14186,17 @@ class ExternalVpnGateway(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. redundancy_type (google.cloud.compute_v1.types.ExternalVpnGateway.RedundancyType): Indicates the user-supplied redundancy type of this external VPN gateway. + + This field is a member of `oneof`_ ``_redundancy_type``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. """ class RedundancyType(proto.Enum): @@ -12686,12 +14226,15 @@ class RedundancyType(proto.Enum): class ExternalVpnGatewayInterface(proto.Message): r"""The interface for the external VPN gateway. + Attributes: id (int): The numeric ID of this interface. The allowed input values for this id for different redundancy types of external VPN gateway: - SINGLE_IP_INTERNALLY_REDUNDANT - 0 - TWO_IPS_REDUNDANCY - 0, 1 - FOUR_IPS_REDUNDANCY - 0, 1, 2, 3 + + This field is a member of `oneof`_ ``_id``. ip_address (str): IP address of the interface in the external VPN gateway. Only IPv4 is supported. This IP @@ -12699,6 +14242,8 @@ class ExternalVpnGatewayInterface(proto.Message): gateway or another Cloud provider's VPN gateway, it cannot be an IP address from Google Compute Engine. + + This field is a member of `oneof`_ ``_ip_address``. """ id = proto.Field(proto.UINT32, number=3355, optional=True,) @@ -12712,15 +14257,20 @@ class ExternalVpnGatewayList(proto.Message): Attributes: etag (str): + This field is a member of `oneof`_ ``_etag``. id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ExternalVpnGateway]): A list of ExternalVpnGateway resources. kind (str): [Output Only] Type of resource. Always compute#externalVpnGatewayList for lists of externalVpnGateways. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -12728,10 +14278,16 @@ class ExternalVpnGatewayList(proto.Message): the query parameter pageToken in the next list request. 
Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -12753,11 +14309,16 @@ def raw_page(self): class FileContentBuffer(proto.Message): r""" + Attributes: content (str): The raw content in the secure keys file. + + This field is a member of `oneof`_ ``_content``. file_type (google.cloud.compute_v1.types.FileContentBuffer.FileType): The file type of source file. + + This field is a member of `oneof`_ ``_file_type``. """ class FileType(proto.Enum): @@ -12784,6 +14345,8 @@ class Firewall(proto.Message): connection. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. denied (Sequence[google.cloud.compute_v1.types.Denied]): The list of DENY rules specified by this firewall. Each rule specifies a protocol and @@ -12792,6 +14355,8 @@ class Firewall(proto.Message): description (str): An optional description of this resource. Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. destination_ranges (Sequence[str]): If destination ranges are specified, the firewall rule applies only to traffic that has @@ -12804,22 +14369,32 @@ class Firewall(proto.Message): ``INGRESS`` traffic, you cannot specify the destinationRanges field, and for ``EGRESS`` traffic, you cannot specify the sourceRanges or sourceTags fields. + + This field is a member of `oneof`_ ``_direction``. disabled (bool): Denotes whether the firewall rule is disabled. When set to true, the firewall rule is not enforced and the network behaves as if it did not exist. If this is unspecified, the firewall rule will be enabled. + + This field is a member of `oneof`_ ``_disabled``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#firewall for firewall rules. + + This field is a member of `oneof`_ ``_kind``. log_config (google.cloud.compute_v1.types.FirewallLogConfig): This field denotes the logging options for a particular firewall rule. If logging is enabled, logs will be exported to Cloud Logging. + + This field is a member of `oneof`_ ``_log_config``. name (str): Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, @@ -12830,6 +14405,8 @@ class Firewall(proto.Message): the last character) must be a dash, lowercase letter, or digit. The last character must be a lowercase letter or digit. + + This field is a member of `oneof`_ ``_name``. network (str): URL of the network resource for this firewall rule. If not specified when creating a firewall @@ -12841,6 +14418,8 @@ class Firewall(proto.Message): https://www.googleapis.com/compute/v1/projects/myproject/global/networks/my- network - projects/myproject/global/networks/my- network - global/networks/default + + This field is a member of `oneof`_ ``_network``. priority (int): Priority for this rule. This is an integer between ``0`` and ``65535``, both inclusive. The default value is ``1000``. 
@@ -12852,8 +14431,12 @@ class Firewall(proto.Message): Note that VPC networks have implied rules with a priority of ``65535``. To avoid conflicts with the implied rules, use a priority number less than ``65535``. + + This field is a member of `oneof`_ ``_priority``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. source_ranges (Sequence[str]): If source ranges are specified, the firewall rule applies only to traffic that has a source @@ -12958,15 +14541,20 @@ class Direction(proto.Enum): class FirewallList(proto.Message): r"""Contains a list of firewalls. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Firewall]): A list of Firewall resources. kind (str): [Output Only] Type of resource. Always compute#firewallList for lists of firewalls. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -12974,10 +14562,16 @@ class FirewallList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -12996,15 +14590,20 @@ def raw_page(self): class FirewallLogConfig(proto.Message): r"""The available logging options for a firewall rule. + Attributes: enable (bool): This field denotes whether to enable logging for a particular firewall rule. + + This field is a member of `oneof`_ ``_enable``. metadata (google.cloud.compute_v1.types.FirewallLogConfig.Metadata): This field can only be specified for a particular firewall rule if logging is enabled for that rule. This field denotes whether to include or exclude metadata for firewall logs. + + This field is a member of `oneof`_ ``_metadata``. """ class Metadata(proto.Enum): @@ -13022,6 +14621,7 @@ class Metadata(proto.Enum): class FirewallPoliciesListAssociationsResponse(proto.Message): r""" + Attributes: associations (Sequence[google.cloud.compute_v1.types.FirewallPolicyAssociation]): A list of associations. @@ -13029,6 +14629,8 @@ class FirewallPoliciesListAssociationsResponse(proto.Message): [Output Only] Type of firewallPolicy associations. Always compute#FirewallPoliciesListAssociations for lists of firewallPolicy associations. + + This field is a member of `oneof`_ ``_kind``. """ associations = proto.RepeatedField( @@ -13039,16 +14641,21 @@ class FirewallPoliciesListAssociationsResponse(proto.Message): class FirewallPolicy(proto.Message): r"""Represents a Firewall Policy resource. + Attributes: associations (Sequence[google.cloud.compute_v1.types.FirewallPolicyAssociation]): A list of associations that belong to this firewall policy. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. 
display_name (str): Deprecated, please use short name instead. User-provided name of the Organization firewall policy. The name should be @@ -13061,6 +14668,8 @@ class FirewallPolicy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_display_name``. fingerprint (str): Specifies a fingerprint for this resource, which is essentially a hash of the metadata's @@ -13073,22 +14682,34 @@ class FirewallPolicy(proto.Message): fail with error 412 conditionNotMet. To see the latest fingerprint, make get() request to the firewall policy. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output only] Type of the resource. Always compute#firewallPolicyfor firewall policies + + This field is a member of `oneof`_ ``_kind``. name (str): [Output Only] Name of the resource. It is a numeric ID allocated by GCP which uniquely identifies the Firewall Policy. + + This field is a member of `oneof`_ ``_name``. parent (str): [Output Only] The parent of the firewall policy. + + This field is a member of `oneof`_ ``_parent``. rule_tuple_count (int): [Output Only] Total count of all firewall policy rule tuples. A firewall policy can not exceed a set number of tuples. + + This field is a member of `oneof`_ ``_rule_tuple_count``. rules (Sequence[google.cloud.compute_v1.types.FirewallPolicyRule]): A list of rules that belong to this policy. There must always be a default rule (rule with priority 2147483647 and @@ -13097,9 +14718,13 @@ class FirewallPolicy(proto.Message): added. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. self_link_with_id (str): [Output Only] Server-defined URL for this resource with the resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. short_name (str): User-provided name of the Organization firewall plicy. The name should be unique in the organization in which the @@ -13111,6 +14736,8 @@ class FirewallPolicy(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_short_name``. """ associations = proto.RepeatedField( @@ -13135,20 +14762,31 @@ class FirewallPolicy(proto.Message): class FirewallPolicyAssociation(proto.Message): r""" + Attributes: attachment_target (str): The target that the firewall policy is attached to. + + This field is a member of `oneof`_ ``_attachment_target``. display_name (str): [Output Only] Deprecated, please use short name instead. The display name of the firewall policy of the association. + + This field is a member of `oneof`_ ``_display_name``. firewall_policy_id (str): [Output Only] The firewall policy ID of the association. + + This field is a member of `oneof`_ ``_firewall_policy_id``. name (str): The name for an association. + + This field is a member of `oneof`_ ``_name``. short_name (str): [Output Only] The short name of the firewall policy of the association. + + This field is a member of `oneof`_ ``_short_name``. 
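To ground the ``Firewall`` message and the ``Allowed``/``Denied`` layer-4 entries documented earlier in this diff (note the generated ``I_p_protocol`` field name), the sketch below builds an ingress allow rule and inserts it; the rule name and project ID are hypothetical::

    from google.cloud import compute_v1

    firewall = compute_v1.Firewall(
        name="allow-http",                  # hypothetical rule name
        network="global/networks/default",
        direction="INGRESS",
        priority=1000,
        source_ranges=["0.0.0.0/0"],
        # Allowed mirrors the Denied message shown above.
        allowed=[compute_v1.Allowed(I_p_protocol="tcp", ports=["80"])],
    )

    client = compute_v1.FirewallsClient()
    operation = client.insert(project="my-project", firewall_resource=firewall)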
""" attachment_target = proto.Field(proto.STRING, number=175773741, optional=True,) @@ -13160,15 +14798,20 @@ class FirewallPolicyAssociation(proto.Message): class FirewallPolicyList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.FirewallPolicy]): A list of FirewallPolicy resources. kind (str): [Output Only] Type of resource. Always compute#firewallPolicyList for listsof FirewallPolicies + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -13176,8 +14819,12 @@ class FirewallPolicyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -13206,29 +14853,43 @@ class FirewallPolicyRule(proto.Message): connection triggers the rule. Can currently be either "allow" or "deny()" where valid values for status are 403, 404, and 502. + + This field is a member of `oneof`_ ``_action``. description (str): An optional description for this resource. + + This field is a member of `oneof`_ ``_description``. direction (google.cloud.compute_v1.types.FirewallPolicyRule.Direction): The direction in which this rule applies. + + This field is a member of `oneof`_ ``_direction``. disabled (bool): Denotes whether the firewall policy rule is disabled. When set to true, the firewall policy rule is not enforced and traffic behaves as if it did not exist. If this is unspecified, the firewall policy rule will be enabled. + + This field is a member of `oneof`_ ``_disabled``. enable_logging (bool): Denotes whether to enable logging for a particular rule. If logging is enabled, logs will be exported to the configured export destination in Stackdriver. Logs may be exported to BigQuery or Pub/Sub. Note: you cannot enable logging on "goto_next" rules. + + This field is a member of `oneof`_ ``_enable_logging``. kind (str): [Output only] Type of the resource. Always compute#firewallPolicyRule for firewall policy rules + + This field is a member of `oneof`_ ``_kind``. match (google.cloud.compute_v1.types.FirewallPolicyRuleMatcher): A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + + This field is a member of `oneof`_ ``_match``. priority (int): An integer indicating the priority of a rule in the list. The priority must be a positive @@ -13236,9 +14897,13 @@ class FirewallPolicyRule(proto.Message): evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest prority. + + This field is a member of `oneof`_ ``_priority``. rule_tuple_count (int): [Output Only] Calculation of the complexity of a single firewall policy rule. + + This field is a member of `oneof`_ ``_rule_tuple_count``. target_resources (Sequence[str]): A list of network resource URLs to which this rule applies. 
This field allows you to control @@ -13304,6 +14969,7 @@ class FirewallPolicyRuleMatcher(proto.Message): class FirewallPolicyRuleMatcherLayer4Config(proto.Message): r""" + Attributes: ip_protocol (str): The IP protocol to which this rule applies. @@ -13312,6 +14978,8 @@ class FirewallPolicyRuleMatcherLayer4Config(proto.Message): the following well known protocol strings (tcp, udp, icmp, esp, ah, ipip, sctp), or the IP protocol number. + + This field is a member of `oneof`_ ``_ip_protocol``. ports (Sequence[str]): An optional list of ports to which this rule applies. This field is only applicable for UDP or TCP protocol. Each entry @@ -13338,13 +15006,19 @@ class FixedOrPercent(proto.Message): a managed instance group with 150 instances would be (80/100 \* 150) = 120 VM instances. If there is a remainder, the number is rounded. + + This field is a member of `oneof`_ ``_calculated``. fixed (int): Specifies a fixed number of VM instances. This must be a positive integer. + + This field is a member of `oneof`_ ``_fixed``. percent (int): Specifies a percentage of instances between 0 to 100%, inclusive. For example, specify 80 for 80%. + + This field is a member of `oneof`_ ``_percent``. """ calculated = proto.Field(proto.INT32, number=472082878, optional=True,) @@ -13386,6 +15060,8 @@ class ForwardingRule(proto.Message): targetGrpcProxy that has validateForProxyless field set to true. For Private Service Connect forwarding rules that forward traffic to Google APIs, IP address must be provided. + + This field is a member of `oneof`_ ``_I_p_address``. I_p_protocol (google.cloud.compute_v1.types.ForwardingRule.IPProtocol): The IP protocol to which this rule applies. For protocol forwarding, valid options are TCP, UDP, ESP, AH, SCTP, ICMP @@ -13393,6 +15069,8 @@ class ForwardingRule(proto.Message): different load balancing products as described in `Load balancing features `__. + + This field is a member of `oneof`_ ``_I_p_protocol``. all_ports (bool): This field is used along with the backend_service field for Internal TCP/UDP Load Balancing or Network Load Balancing, @@ -13401,6 +15079,8 @@ class ForwardingRule(proto.Message): port_range, or allPorts. The three are mutually exclusive. For TCP, UDP and SCTP traffic, packets addressed to any ports will be forwarded to the target or backendService. + + This field is a member of `oneof`_ ``_all_ports``. allow_global_access (bool): This field is used along with the backend_service field for internal load balancing or with the target field for @@ -13408,18 +15088,26 @@ class ForwardingRule(proto.Message): clients can access ILB from all regions. Otherwise only allows access from clients in the same region as the internal load balancer. + + This field is a member of `oneof`_ ``_allow_global_access``. backend_service (str): Identifies the backend service to which the forwarding rule sends traffic. Required for Internal TCP/UDP Load Balancing and Network Load Balancing; must be omitted for all other load balancer types. + + This field is a member of `oneof`_ ``_backend_service``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. 
This field is @@ -13430,14 +15118,20 @@ class ForwardingRule(proto.Message): from another concurrent request. To see the latest fingerprint, make a get() request to retrieve a ForwardingRule. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. ip_version (google.cloud.compute_v1.types.ForwardingRule.IpVersion): The IP Version that will be used by this forwarding rule. Valid options are IPV4 or IPV6. This can only be specified for an external global forwarding rule. + + This field is a member of `oneof`_ ``_ip_version``. is_mirroring_collector (bool): Indicates whether or not this load balancer can be used as a collector for packet mirroring. @@ -13447,9 +15141,13 @@ class ForwardingRule(proto.Message): to them. This can only be set to true for load balancers that have their loadBalancingScheme set to INTERNAL. + + This field is a member of `oneof`_ ``_is_mirroring_collector``. kind (str): [Output Only] Type of the resource. Always compute#forwardingRule for Forwarding Rule resources. + + This field is a member of `oneof`_ ``_kind``. label_fingerprint (str): A fingerprint for the labels being applied to this resource, which is essentially a hash of @@ -13462,6 +15160,8 @@ class ForwardingRule(proto.Message): with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a ForwardingRule. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.ForwardingRule.LabelsEntry]): Labels for this resource. These can only be added or modified by the setLabels method. Each @@ -13471,6 +15171,8 @@ class ForwardingRule(proto.Message): Specifies the forwarding rule type. For more information about forwarding rules, refer to Forwarding rule concepts. + + This field is a member of `oneof`_ ``_load_balancing_scheme``. metadata_filters (Sequence[google.cloud.compute_v1.types.MetadataFilter]): Opaque filter criteria used by load balancer to restrict routing configuration to a limited set of xDS compliant @@ -13504,6 +15206,8 @@ class ForwardingRule(proto.Message): traffic to Google APIs, the forwarding rule name must be a 1-20 characters string with lowercase letters and numbers and must start with a letter. + + This field is a member of `oneof`_ ``_name``. network (str): This field is not used for external load balancing. For Internal TCP/UDP Load Balancing, @@ -13514,6 +15218,8 @@ class ForwardingRule(proto.Message): Service Connect forwarding rules that forward traffic to Google APIs, a network must be provided. + + This field is a member of `oneof`_ ``_network``. network_tier (google.cloud.compute_v1.types.ForwardingRule.NetworkTier): This signifies the networking tier used for configuring this load balancer and can only take @@ -13524,6 +15230,8 @@ class ForwardingRule(proto.Message): specified, it is assumed to be PREMIUM. If IPAddress is specified, this value must be equal to the networkTier of the Address. + + This field is a member of `oneof`_ ``_network_tier``. port_range (str): This field can be used only if: - Load balancing scheme is one of EXTERNAL, INTERNAL_SELF_MANAGED or INTERNAL_MANAGED - @@ -13537,6 +15245,8 @@ class ForwardingRule(proto.Message): see `Port specifications `__. @pattern: \\d+(?:-\d+)? + + This field is a member of `oneof`_ ``_port_range``. 
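Editor's note: the recurring "This field is a member of `oneof`_" additions in this hunk document that these scalar fields are generated as optional (oneof-backed), so callers can tell an unset field apart from its default value. A minimal, hedged sketch of that behaviour, using only field names taken from the ForwardingRule docstring above (the values are illustrative)::

    from google.cloud.compute_v1.types import compute

    rule = compute.ForwardingRule(name="example-rule", port_range="80-80")

    print(rule.name)              # "example-rule"
    print(rule.description)       # "" -- unset optional fields fall back to the type default
    print("port_range" in rule)   # True  -- presence is tracked for oneof-backed fields
    print("description" in rule)  # False -- unset is distinguishable from an empty string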
ports (Sequence[str]): The ports field is only supported when the forwarding rule references a backend_service directly. Only packets @@ -13551,16 +15261,23 @@ class ForwardingRule(proto.Message): psc_connection_id (int): [Output Only] The PSC connection id of the PSC Forwarding Rule. + + This field is a member of `oneof`_ ``_psc_connection_id``. psc_connection_status (google.cloud.compute_v1.types.ForwardingRule.PscConnectionStatus): + This field is a member of `oneof`_ ``_psc_connection_status``. region (str): [Output Only] URL of the region where the regional forwarding rule resides. This field is not applicable to global forwarding rules. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. service_directory_registrations (Sequence[google.cloud.compute_v1.types.ForwardingRuleServiceDirectoryRegistration]): Service Directory resources to register this forwarding rule with. Currently, only supports a @@ -13577,10 +15294,14 @@ class ForwardingRule(proto.Message): following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. This field is only used for internal load balancing. + + This field is a member of `oneof`_ ``_service_label``. service_name (str): [Output Only] The internal fully qualified service name for this Forwarding Rule. This field is only used for internal load balancing. + + This field is a member of `oneof`_ ``_service_name``. subnetwork (str): This field identifies the subnetwork that the load balanced IP should belong to for this @@ -13591,8 +15312,11 @@ class ForwardingRule(proto.Message): specified if the network is in custom subnet mode or when creating external forwarding rule with IPv6. + + This field is a member of `oneof`_ ``_subnetwork``. target (str): + This field is a member of `oneof`_ ``_target``. """ class IPProtocol(proto.Enum): @@ -13703,10 +15427,13 @@ class PscConnectionStatus(proto.Enum): class ForwardingRuleAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ForwardingRuleAggregatedList.ItemsEntry]): A list of ForwardingRulesScopedList resources. @@ -13714,6 +15441,8 @@ class ForwardingRuleAggregatedList(proto.Message): [Output Only] Type of resource. Always compute#forwardingRuleAggregatedList for lists of forwarding rules. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -13721,12 +15450,18 @@ class ForwardingRuleAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
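Editor's note: ForwardingRuleAggregatedList above (and the other *List messages in this hunk) all describe the same next_page_token / pageToken handshake. A hedged sketch of driving that handshake by hand; ``fetch_page`` is a hypothetical callable standing in for whatever issues the aggregated-list RPC for a given page token::

    from typing import Callable, Iterator

    from google.cloud.compute_v1.types import compute

    def iter_pages(
        fetch_page: Callable[[str], compute.ForwardingRuleAggregatedList],
    ) -> Iterator[compute.ForwardingRuleAggregatedList]:
        token = ""
        while True:
            page = fetch_page(token)
            yield page
            # An unset next_page_token marks the final page of results.
            if not page.next_page_token:
                return
            token = page.next_page_token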
""" @property @@ -13751,14 +15486,19 @@ def raw_page(self): class ForwardingRuleList(proto.Message): r"""Contains a list of ForwardingRule resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ForwardingRule]): A list of ForwardingRule resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -13766,10 +15506,16 @@ class ForwardingRuleList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -13790,9 +15536,11 @@ def raw_page(self): class ForwardingRuleReference(proto.Message): r""" + Attributes: forwarding_rule (str): + This field is a member of `oneof`_ ``_forwarding_rule``. """ forwarding_rule = proto.Field(proto.STRING, number=269964030, optional=True,) @@ -13808,15 +15556,21 @@ class ForwardingRuleServiceDirectoryRegistration(proto.Message): namespace (str): Service Directory namespace to register the forwarding rule under. + + This field is a member of `oneof`_ ``_namespace``. service (str): Service Directory service to register the forwarding rule under. + + This field is a member of `oneof`_ ``_service``. service_directory_region (str): [Optional] Service Directory region to register this global forwarding rule under. Default to "us-central1". Only used for PSC for Google APIs. All PSC for Google APIs Forwarding Rules on the same network should use the same Service Directory region. + + This field is a member of `oneof`_ ``_service_directory_region``. """ namespace = proto.Field(proto.STRING, number=178476379, optional=True,) @@ -13828,6 +15582,7 @@ class ForwardingRuleServiceDirectoryRegistration(proto.Message): class ForwardingRulesScopedList(proto.Message): r""" + Attributes: forwarding_rules (Sequence[google.cloud.compute_v1.types.ForwardingRule]): A list of forwarding rules contained in this @@ -13835,6 +15590,8 @@ class ForwardingRulesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of forwarding rules when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ forwarding_rules = proto.RepeatedField( @@ -13847,6 +15604,7 @@ class ForwardingRulesScopedList(proto.Message): class GRPCHealthCheck(proto.Message): r""" + Attributes: grpc_service_name (str): The gRPC service name for the health check. This field is @@ -13856,15 +15614,21 @@ class GRPCHealthCheck(proto.Message): service_name means the health of that gRPC service, as defined by the owner of the service. The grpc_service_name can only be ASCII. + + This field is a member of `oneof`_ ``_grpc_service_name``. port (int): The port number for the health check request. Must be specified if port_name and port_specification are not set or if port_specification is USE_FIXED_PORT. Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. 
port_name (str): Port name as defined in InstanceGroup#NamedPort#name. If both port and port_name are defined, port takes precedence. The port_name should conform to RFC1035. + + This field is a member of `oneof`_ ``_port_name``. port_specification (google.cloud.compute_v1.types.GRPCHealthCheck.PortSpecification): Specifies how port is selected for health checking, can be one of following values: USE_FIXED_PORT: The port number in @@ -13876,6 +15640,8 @@ class GRPCHealthCheck(proto.Message): used for health checking. If not specified, gRPC health check follows behavior specified in port and portName fields. + + This field is a member of `oneof`_ ``_port_specification``. """ class PortSpecification(proto.Enum): @@ -13948,6 +15714,8 @@ class GetAssociationFirewallPolicyRequest(proto.Message): name (str): The name of the association to get from the firewall policy. + + This field is a member of `oneof`_ ``_name``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -14238,6 +16006,8 @@ class GetGlobalOrganizationOperationRequest(proto.Message): Name of the Operations resource to return. parent_id (str): Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. """ operation = proto.Field(proto.STRING, number=52090215,) @@ -14272,9 +16042,13 @@ class GetGuestAttributesInstanceRequest(proto.Message): query_path (str): Specifies the guest attributes path to be queried. + + This field is a member of `oneof`_ ``_query_path``. variable_key (str): Specifies the key for the guest attributes entry. + + This field is a member of `oneof`_ ``_variable_key``. zone (str): The name of the zone for this request. """ @@ -14377,6 +16151,8 @@ class GetIamPolicyDiskRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. resource (str): @@ -14400,6 +16176,8 @@ class GetIamPolicyFirewallPolicyRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. resource (str): Name or id of the resource for this request. """ @@ -14417,6 +16195,8 @@ class GetIamPolicyImageRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. resource (str): @@ -14437,6 +16217,8 @@ class GetIamPolicyInstanceRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. resource (str): @@ -14460,6 +16242,8 @@ class GetIamPolicyInstanceTemplateRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. resource (str): @@ -14480,6 +16264,8 @@ class GetIamPolicyLicenseRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. 
resource (str): @@ -14500,6 +16286,8 @@ class GetIamPolicyNodeGroupRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. resource (str): @@ -14523,6 +16311,8 @@ class GetIamPolicyNodeTemplateRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. region (str): @@ -14546,6 +16336,8 @@ class GetIamPolicyRegionDiskRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. region (str): @@ -14569,6 +16361,8 @@ class GetIamPolicyReservationRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. resource (str): @@ -14592,6 +16386,8 @@ class GetIamPolicyResourcePolicyRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. region (str): @@ -14615,6 +16411,8 @@ class GetIamPolicyServiceAttachmentRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. region (str): @@ -14638,6 +16436,8 @@ class GetIamPolicySnapshotRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. resource (str): @@ -14658,6 +16458,8 @@ class GetIamPolicySubnetworkRequest(proto.Message): Attributes: options_requested_policy_version (int): Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. region (str): @@ -14902,6 +16704,8 @@ class GetNatMappingInfoRoutersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -14909,6 +16713,8 @@ class GetNatMappingInfoRoutersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -14920,10 +16726,14 @@ class GetNatMappingInfoRoutersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. 
page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -14932,6 +16742,8 @@ class GetNatMappingInfoRoutersRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. router (str): Name of the Router resource to query for Nat Mapping information of VM endpoints. @@ -15516,6 +17328,8 @@ class GetRuleFirewallPolicyRequest(proto.Message): priority (int): The priority of the rule to get from the firewall policy. + + This field is a member of `oneof`_ ``_priority``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -15530,6 +17344,8 @@ class GetRuleSecurityPolicyRequest(proto.Message): priority (int): The priority of the rule to get from the security policy. + + This field is a member of `oneof`_ ``_priority``. project (str): Project ID for this request. security_policy (str): @@ -15585,6 +17401,8 @@ class GetSerialPortOutputInstanceRequest(proto.Message): port (int): Specifies which COM or serial port to retrieve data from. + + This field is a member of `oneof`_ ``_port``. project (str): Project ID for this request. start (int): @@ -15603,6 +17421,8 @@ class GetSerialPortOutputInstanceRequest(proto.Message): to the most recent number of bytes written to the serial port. For example, -3 is interpreted as the most recent 3 bytes written to the serial console. + + This field is a member of `oneof`_ ``_start``. zone (str): The name of the zone for this request. """ @@ -15960,6 +17780,8 @@ class GetXpnResourcesProjectsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -15967,6 +17789,8 @@ class GetXpnResourcesProjectsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -15978,16 +17802,22 @@ class GetXpnResourcesProjectsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
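Editor's note: the GetXpnResourcesProjectsRequest fields just described (filter, max_results, order_by, page_token, return_partial_success) follow the common list-request pattern on this surface. A minimal, hedged construction using only the fields and the example filter syntax from the docstring; the project ID is hypothetical::

    from google.cloud.compute_v1.types import compute

    request = compute.GetXpnResourcesProjectsRequest(
        project="my-project",   # hypothetical project ID
        filter='(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell")',
        max_results=100,        # acceptable values are 0 to 500
        order_by="creationTimestamp desc",  # per the docstring, only name or creationTimestamp desc
        return_partial_success=True,
    )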
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -16033,6 +17863,7 @@ class GetZoneRequest(proto.Message): class GlobalNetworkEndpointGroupsAttachEndpointsRequest(proto.Message): r""" + Attributes: network_endpoints (Sequence[google.cloud.compute_v1.types.NetworkEndpoint]): The list of network endpoints to be attached. @@ -16045,6 +17876,7 @@ class GlobalNetworkEndpointGroupsAttachEndpointsRequest(proto.Message): class GlobalNetworkEndpointGroupsDetachEndpointsRequest(proto.Message): r""" + Attributes: network_endpoints (Sequence[google.cloud.compute_v1.types.NetworkEndpoint]): The list of network endpoints to be detached. @@ -16057,6 +17889,7 @@ class GlobalNetworkEndpointGroupsDetachEndpointsRequest(proto.Message): class GlobalOrganizationSetPolicyRequest(proto.Message): r""" + Attributes: bindings (Sequence[google.cloud.compute_v1.types.Binding]): Flatten Policy to create a backward @@ -16066,12 +17899,16 @@ class GlobalOrganizationSetPolicyRequest(proto.Message): Flatten Policy to create a backward compatible wire-format. Deprecated. Use 'policy' to specify the etag. + + This field is a member of `oneof`_ ``_etag``. policy (google.cloud.compute_v1.types.Policy): REQUIRED: The complete policy to be applied to the 'resource'. The size of the policy is limited to a few 10s of KB. An empty policy is in general a valid policy but certain services (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. """ bindings = proto.RepeatedField(proto.MESSAGE, number=403251854, message="Binding",) @@ -16083,6 +17920,7 @@ class GlobalOrganizationSetPolicyRequest(proto.Message): class GlobalSetLabelsRequest(proto.Message): r""" + Attributes: label_fingerprint (str): The fingerprint of the previous set of labels @@ -16095,6 +17933,8 @@ class GlobalSetLabelsRequest(proto.Message): with error 412 conditionNotMet. Make a get() request to the resource to get the latest fingerprint. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.GlobalSetLabelsRequest.LabelsEntry]): A list of labels to apply for this resource. Each label key & value must comply with RFC1035. Specifically, the name @@ -16113,6 +17953,7 @@ class GlobalSetLabelsRequest(proto.Message): class GlobalSetPolicyRequest(proto.Message): r""" + Attributes: bindings (Sequence[google.cloud.compute_v1.types.Binding]): Flatten Policy to create a backward @@ -16122,12 +17963,16 @@ class GlobalSetPolicyRequest(proto.Message): Flatten Policy to create a backward compatible wire-format. Deprecated. Use 'policy' to specify the etag. + + This field is a member of `oneof`_ ``_etag``. policy (google.cloud.compute_v1.types.Policy): REQUIRED: The complete policy to be applied to the 'resource'. The size of the policy is limited to a few 10s of KB. An empty policy is in general a valid policy but certain services (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. """ bindings = proto.RepeatedField(proto.MESSAGE, number=403251854, message="Binding",) @@ -16139,22 +17984,35 @@ class GlobalSetPolicyRequest(proto.Message): class GuestAttributes(proto.Message): r"""A guest attributes entry. + Attributes: kind (str): [Output Only] Type of the resource. Always compute#guestAttributes for guest attributes entry. + + This field is a member of `oneof`_ ``_kind``. query_path (str): The path to be queried. This can be the default namespace ('') or a nested namespace ('\/') or a specified key ('\/\'). 
+ + This field is a member of `oneof`_ ``_query_path``. query_value (google.cloud.compute_v1.types.GuestAttributesValue): [Output Only] The value of the requested queried path. + + This field is a member of `oneof`_ ``_query_value``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. variable_key (str): The key to search for. + + This field is a member of `oneof`_ ``_variable_key``. variable_value (str): [Output Only] The value found for the requested key. + + This field is a member of `oneof`_ ``_variable_value``. """ kind = proto.Field(proto.STRING, number=3292052, optional=True,) @@ -16169,13 +18027,20 @@ class GuestAttributes(proto.Message): class GuestAttributesEntry(proto.Message): r"""A guest attributes namespace/key/value entry. + Attributes: key (str): Key for the guest attribute entry. + + This field is a member of `oneof`_ ``_key``. namespace (str): Namespace for the guest attribute entry. + + This field is a member of `oneof`_ ``_namespace``. value (str): Value for the guest attribute entry. + + This field is a member of `oneof`_ ``_value``. """ key = proto.Field(proto.STRING, number=106079, optional=True,) @@ -16185,6 +18050,7 @@ class GuestAttributesEntry(proto.Message): class GuestAttributesValue(proto.Message): r"""Array of guest attribute namespace/key/value tuples. + Attributes: items (Sequence[google.cloud.compute_v1.types.GuestAttributesEntry]): @@ -16197,11 +18063,14 @@ class GuestAttributesValue(proto.Message): class GuestOsFeature(proto.Message): r"""Guest OS features. + Attributes: type_ (google.cloud.compute_v1.types.GuestOsFeature.Type): The ID of a supported feature. Read Enabling guest operating system features to see a list of available options. + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -16223,19 +18092,26 @@ class Type(proto.Enum): class HTTP2HealthCheck(proto.Message): r""" + Attributes: host (str): The value of the host header in the HTTP/2 health check request. If left empty (default value), the IP on behalf of which this health check is performed will be used. + + This field is a member of `oneof`_ ``_host``. port (int): The TCP port number for the health check request. The default value is 443. Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. port_name (str): Port name as defined in InstanceGroup#NamedPort#name. If both port and port_name are defined, port takes precedence. + + This field is a member of `oneof`_ ``_port_name``. port_specification (google.cloud.compute_v1.types.HTTP2HealthCheck.PortSpecification): Specifies how port is selected for health checking, can be one of following values: USE_FIXED_PORT: The port number in @@ -16247,18 +18123,26 @@ class HTTP2HealthCheck(proto.Message): used for health checking. If not specified, HTTP2 health check follows behavior specified in port and portName fields. + + This field is a member of `oneof`_ ``_port_specification``. proxy_header (google.cloud.compute_v1.types.HTTP2HealthCheck.ProxyHeader): Specifies the type of proxy header to append before sending data to the backend, either NONE or PROXY_V1. The default is NONE. + + This field is a member of `oneof`_ ``_proxy_header``. request_path (str): The request path of the HTTP/2 health check request. The default value is /. + + This field is a member of `oneof`_ ``_request_path``. response (str): The string to match anywhere in the first 1024 bytes of the response body. 
If left empty (the default value), the status code determines health. The response data can only be ASCII. + + This field is a member of `oneof`_ ``_response``. """ class PortSpecification(proto.Enum): @@ -16299,19 +18183,26 @@ class ProxyHeader(proto.Enum): class HTTPHealthCheck(proto.Message): r""" + Attributes: host (str): The value of the host header in the HTTP health check request. If left empty (default value), the IP on behalf of which this health check is performed will be used. + + This field is a member of `oneof`_ ``_host``. port (int): The TCP port number for the health check request. The default value is 80. Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. port_name (str): Port name as defined in InstanceGroup#NamedPort#name. If both port and port_name are defined, port takes precedence. + + This field is a member of `oneof`_ ``_port_name``. port_specification (google.cloud.compute_v1.types.HTTPHealthCheck.PortSpecification): Specifies how port is selected for health checking, can be one of following values: USE_FIXED_PORT: The port number in @@ -16323,18 +18214,26 @@ class HTTPHealthCheck(proto.Message): used for health checking. If not specified, HTTP health check follows behavior specified in port and portName fields. + + This field is a member of `oneof`_ ``_port_specification``. proxy_header (google.cloud.compute_v1.types.HTTPHealthCheck.ProxyHeader): Specifies the type of proxy header to append before sending data to the backend, either NONE or PROXY_V1. The default is NONE. + + This field is a member of `oneof`_ ``_proxy_header``. request_path (str): The request path of the HTTP health check request. The default value is /. + + This field is a member of `oneof`_ ``_request_path``. response (str): The string to match anywhere in the first 1024 bytes of the response body. If left empty (the default value), the status code determines health. The response data can only be ASCII. + + This field is a member of `oneof`_ ``_response``. """ class PortSpecification(proto.Enum): @@ -16375,19 +18274,26 @@ class ProxyHeader(proto.Enum): class HTTPSHealthCheck(proto.Message): r""" + Attributes: host (str): The value of the host header in the HTTPS health check request. If left empty (default value), the IP on behalf of which this health check is performed will be used. + + This field is a member of `oneof`_ ``_host``. port (int): The TCP port number for the health check request. The default value is 443. Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. port_name (str): Port name as defined in InstanceGroup#NamedPort#name. If both port and port_name are defined, port takes precedence. + + This field is a member of `oneof`_ ``_port_name``. port_specification (google.cloud.compute_v1.types.HTTPSHealthCheck.PortSpecification): Specifies how port is selected for health checking, can be one of following values: USE_FIXED_PORT: The port number in @@ -16399,18 +18305,26 @@ class HTTPSHealthCheck(proto.Message): used for health checking. If not specified, HTTPS health check follows behavior specified in port and portName fields. + + This field is a member of `oneof`_ ``_port_specification``. proxy_header (google.cloud.compute_v1.types.HTTPSHealthCheck.ProxyHeader): Specifies the type of proxy header to append before sending data to the backend, either NONE or PROXY_V1. The default is NONE. + + This field is a member of `oneof`_ ``_proxy_header``. request_path (str): The request path of the HTTPS health check request. 
The default value is /. + + This field is a member of `oneof`_ ``_request_path``. response (str): The string to match anywhere in the first 1024 bytes of the response body. If left empty (the default value), the status code determines health. The response data can only be ASCII. + + This field is a member of `oneof`_ ``_response``. """ class PortSpecification(proto.Enum): @@ -16472,31 +18386,49 @@ class HealthCheck(proto.Message): check_interval_sec (int): How often (in seconds) to send a health check. The default value is 5 seconds. + + This field is a member of `oneof`_ ``_check_interval_sec``. creation_timestamp (str): [Output Only] Creation timestamp in 3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. grpc_health_check (google.cloud.compute_v1.types.GRPCHealthCheck): + This field is a member of `oneof`_ ``_grpc_health_check``. healthy_threshold (int): A so-far unhealthy instance will be marked healthy after this many consecutive successes. The default value is 2. + + This field is a member of `oneof`_ ``_healthy_threshold``. http2_health_check (google.cloud.compute_v1.types.HTTP2HealthCheck): + This field is a member of `oneof`_ ``_http2_health_check``. http_health_check (google.cloud.compute_v1.types.HTTPHealthCheck): + This field is a member of `oneof`_ ``_http_health_check``. https_health_check (google.cloud.compute_v1.types.HTTPSHealthCheck): + This field is a member of `oneof`_ ``_https_health_check``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): Type of the resource. + + This field is a member of `oneof`_ ``_kind``. log_config (google.cloud.compute_v1.types.HealthCheckLogConfig): Configure logging on this health check. + + This field is a member of `oneof`_ ``_log_config``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -16507,30 +18439,44 @@ class HealthCheck(proto.Message): first character is a lowercase letter, and all following characters are a dash, lowercase letter, or digit, except the last character, which isn't a dash. + + This field is a member of `oneof`_ ``_name``. region (str): [Output Only] Region where the health check resides. Not applicable to global health checks. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. ssl_health_check (google.cloud.compute_v1.types.SSLHealthCheck): + This field is a member of `oneof`_ ``_ssl_health_check``. tcp_health_check (google.cloud.compute_v1.types.TCPHealthCheck): + This field is a member of `oneof`_ ``_tcp_health_check``. timeout_sec (int): How long (in seconds) to wait before claiming failure. The default value is 5 seconds. It is invalid for timeoutSec to have greater value than checkIntervalSec. + + This field is a member of `oneof`_ ``_timeout_sec``. type_ (google.cloud.compute_v1.types.HealthCheck.Type): Specifies the type of the healthCheck, either TCP, SSL, HTTP, HTTPS or HTTP2. If not specified, the default is TCP. Exactly one of the protocol-specific health check field must be specified, which must match type field. + + This field is a member of `oneof`_ ``_type``. 
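Editor's note: the HealthCheck docstring above states that exactly one protocol-specific sub-message must be set and must agree with ``type_``. A hedged sketch pairing ``type_`` with ``http_health_check``, using only fields listed in this hunk; the ``HTTP`` enum member name is assumed from the values the docstring enumerates::

    from google.cloud.compute_v1.types import compute

    health_check = compute.HealthCheck(
        name="example-http-check",
        type_=compute.HealthCheck.Type.HTTP,   # must match the sub-message below
        http_health_check=compute.HTTPHealthCheck(
            port=80,
            request_path="/healthz",           # hypothetical path; the default is "/"
        ),
        check_interval_sec=5,
        timeout_sec=5,                         # must not exceed check_interval_sec
        healthy_threshold=2,
    )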
unhealthy_threshold (int): A so-far healthy instance will be marked unhealthy after this many consecutive failures. The default value is 2. + + This field is a member of `oneof`_ ``_unhealthy_threshold``. """ class Type(proto.Enum): @@ -16585,14 +18531,19 @@ class Type(proto.Enum): class HealthCheckList(proto.Message): r"""Contains a list of HealthCheck resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.HealthCheck]): A list of HealthCheck resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -16600,10 +18551,16 @@ class HealthCheckList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -16629,6 +18586,8 @@ class HealthCheckLogConfig(proto.Message): Indicates whether or not to export logs. This is false by default, which means no health check logging will be done. + + This field is a member of `oneof`_ ``_enable``. """ enable = proto.Field(proto.BOOL, number=311764355, optional=True,) @@ -16645,6 +18604,7 @@ class HealthCheckReference(proto.Message): Attributes: health_check (str): + This field is a member of `oneof`_ ``_health_check``. """ health_check = proto.Field(proto.STRING, number=308876645, optional=True,) @@ -16652,13 +18612,18 @@ class HealthCheckReference(proto.Message): class HealthCheckService(proto.Message): r"""Represents a Health-Check as a Service resource. + Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -16670,6 +18635,8 @@ class HealthCheckService(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the HealthCheckService. + + This field is a member of `oneof`_ ``_fingerprint``. health_checks (Sequence[str]): A list of URLs to the HealthCheck resources. Must have at least one HealthCheck, and not more than 10. HealthCheck @@ -16690,12 +18657,18 @@ class HealthCheckService(proto.Message): endpoint reports UNHEALTHY, then UNHEALTHY is the HealthState of the endpoint. If all health checks report HEALTHY, the HealthState of the endpoint is HEALTHY. . + + This field is a member of `oneof`_ ``_health_status_aggregation_policy``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output only] Type of the resource. Always compute#healthCheckServicefor health check services. + + This field is a member of `oneof`_ ``_kind``. 
name (str): Name of the resource. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 @@ -16704,6 +18677,8 @@ class HealthCheckService(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network_endpoint_groups (Sequence[str]): A list of URLs to the NetworkEndpointGroup resources. Must not have more than 100. For @@ -16724,8 +18699,12 @@ class HealthCheckService(proto.Message): health check services. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. """ class HealthStatusAggregationPolicy(proto.Enum): @@ -16769,6 +18748,7 @@ class HealthCheckServiceReference(proto.Message): Attributes: health_check_service (str): + This field is a member of `oneof`_ ``_health_check_service``. """ health_check_service = proto.Field(proto.STRING, number=408374747, optional=True,) @@ -16776,16 +18756,21 @@ class HealthCheckServiceReference(proto.Message): class HealthCheckServicesList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.HealthCheckService]): A list of HealthCheckService resources. kind (str): [Output Only] Type of the resource. Always compute#healthCheckServicesList for lists of HealthCheckServices. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -16793,10 +18778,16 @@ class HealthCheckServicesList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -16817,14 +18808,19 @@ def raw_page(self): class HealthChecksAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.HealthChecksAggregatedList.ItemsEntry]): A list of HealthChecksScopedList resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -16832,12 +18828,18 @@ class HealthChecksAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -16859,6 +18861,7 @@ def raw_page(self): class HealthChecksScopedList(proto.Message): r""" + Attributes: health_checks (Sequence[google.cloud.compute_v1.types.HealthCheck]): A list of HealthChecks contained in this @@ -16866,6 +18869,8 @@ class HealthChecksScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ health_checks = proto.RepeatedField( @@ -16878,6 +18883,7 @@ class HealthChecksScopedList(proto.Message): class HealthStatus(proto.Message): r""" + Attributes: annotations (Sequence[google.cloud.compute_v1.types.HealthStatus.AnnotationsEntry]): Metadata defined as annotations for network @@ -16885,26 +18891,40 @@ class HealthStatus(proto.Message): forwarding_rule (str): URL of the forwarding rule associated with the health status of the instance. + + This field is a member of `oneof`_ ``_forwarding_rule``. forwarding_rule_ip (str): A forwarding rule IP address assigned to this instance. + + This field is a member of `oneof`_ ``_forwarding_rule_ip``. health_state (google.cloud.compute_v1.types.HealthStatus.HealthState): Health state of the instance. + + This field is a member of `oneof`_ ``_health_state``. instance (str): URL of the instance resource. + + This field is a member of `oneof`_ ``_instance``. ip_address (str): For target pool based Network Load Balancing, it indicates the forwarding rule's IP address assigned to this instance. For other types of load balancing, the field indicates VM internal ip. + + This field is a member of `oneof`_ ``_ip_address``. port (int): The named port of the instance group, not necessarily the port that is health-checked. + + This field is a member of `oneof`_ ``_port``. weight (str): + This field is a member of `oneof`_ ``_weight``. weight_error (google.cloud.compute_v1.types.HealthStatus.WeightError): + This field is a member of `oneof`_ ``_weight_error``. """ class HealthState(proto.Enum): @@ -16938,23 +18958,34 @@ class WeightError(proto.Enum): class HealthStatusForNetworkEndpoint(proto.Message): r""" + Attributes: backend_service (google.cloud.compute_v1.types.BackendServiceReference): URL of the backend service associated with the health state of the network endpoint. + + This field is a member of `oneof`_ ``_backend_service``. forwarding_rule (google.cloud.compute_v1.types.ForwardingRuleReference): URL of the forwarding rule associated with the health state of the network endpoint. + + This field is a member of `oneof`_ ``_forwarding_rule``. health_check (google.cloud.compute_v1.types.HealthCheckReference): URL of the health check associated with the health state of the network endpoint. + + This field is a member of `oneof`_ ``_health_check``. health_check_service (google.cloud.compute_v1.types.HealthCheckServiceReference): URL of the health check service associated with the health state of the network endpoint. + + This field is a member of `oneof`_ ``_health_check_service``. health_state (google.cloud.compute_v1.types.HealthStatusForNetworkEndpoint.HealthState): Health state of the network endpoint determined based on the health checks configured. + + This field is a member of `oneof`_ ``_health_state``. 
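Editor's note: HealthStatusForNetworkEndpoint above reports per-endpoint health via its ``health_state`` enum. A hedged helper that filters results down to the endpoints that are not healthy; the ``HEALTHY`` member name is an assumption, since the enum's values are not spelled out in this hunk::

    from typing import Iterable, List

    from google.cloud.compute_v1.types import compute

    def unhealthy_endpoints(
        statuses: Iterable[compute.HealthStatusForNetworkEndpoint],
    ) -> List[compute.HealthStatusForNetworkEndpoint]:
        # HEALTHY is assumed to be a member of the HealthState enum.
        healthy = compute.HealthStatusForNetworkEndpoint.HealthState.HEALTHY
        return [status for status in statuses if status.health_state != healthy]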
""" class HealthState(proto.Enum): @@ -17002,6 +19033,8 @@ class HostRule(proto.Message): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. hosts (Sequence[str]): The list of host patterns to match. They must be valid hostnames with optional port numbers in the format @@ -17014,6 +19047,8 @@ class HostRule(proto.Message): The name of the PathMatcher to use to match the path portion of the URL if the hostRule matches the URL's host portion. + + This field is a member of `oneof`_ ``_path_matcher``. """ description = proto.Field(proto.STRING, number=422937596, optional=True,) @@ -17034,11 +19069,15 @@ class HttpFaultAbort(proto.Message): this mapping table. HTTP status 200 is mapped to gRPC status UNKNOWN. Injecting an OK status is currently not supported by Traffic Director. + + This field is a member of `oneof`_ ``_http_status``. percentage (float): The percentage of traffic (connections/operations/requests) which will be aborted as part of fault injection. The value must be between 0.0 and 100.0 inclusive. + + This field is a member of `oneof`_ ``_percentage``. """ http_status = proto.Field(proto.UINT32, number=468949897, optional=True,) @@ -17054,12 +19093,16 @@ class HttpFaultDelay(proto.Message): fixed_delay (google.cloud.compute_v1.types.Duration): Specifies the value of the fixed delay interval. + + This field is a member of `oneof`_ ``_fixed_delay``. percentage (float): The percentage of traffic (connections/operations/requests) on which delay will be introduced as part of fault injection. The value must be between 0.0 and 100.0 inclusive. + + This field is a member of `oneof`_ ``_percentage``. """ fixed_delay = proto.Field( @@ -17081,10 +19124,14 @@ class HttpFaultInjection(proto.Message): abort (google.cloud.compute_v1.types.HttpFaultAbort): The specification for how client requests are aborted as part of fault injection. + + This field is a member of `oneof`_ ``_abort``. delay (google.cloud.compute_v1.types.HttpFaultDelay): The specification for how client requests are delayed as part of fault injection, before being sent to a backend service. + + This field is a member of `oneof`_ ``_delay``. """ abort = proto.Field( @@ -17129,12 +19176,15 @@ class HttpHeaderAction(proto.Message): class HttpHeaderMatch(proto.Message): r"""matchRule criteria for request header matches. + Attributes: exact_match (str): The value should exactly match contents of exactMatch. Only one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch must be set. + + This field is a member of `oneof`_ ``_exact_match``. header_name (str): The name of the HTTP header to match. For matching against the HTTP request's authority, use a headerMatch with the @@ -17149,17 +19199,23 @@ class HttpHeaderMatch(proto.Message): ``grpc-accept-encoding``, ``grpc-encoding``, ``grpc-previous-rpc-attempts``, ``grpc-tags-bin``, ``grpc-timeout`` and \`grpc-trace-bin. + + This field is a member of `oneof`_ ``_header_name``. invert_match (bool): If set to false, the headerMatch is considered a match if the match criteria above are met. If set to true, the headerMatch is considered a match if the match criteria above are NOT met. The default setting is false. + + This field is a member of `oneof`_ ``_invert_match``. prefix_match (str): The value of the header must start with the contents of prefixMatch. Only one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch must be set. 
+ + This field is a member of `oneof`_ ``_prefix_match``. present_match (bool): A header with the contents of headerName must exist. The match takes place whether or not the @@ -17167,6 +19223,8 @@ class HttpHeaderMatch(proto.Message): exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch must be set. + + This field is a member of `oneof`_ ``_present_match``. range_match (google.cloud.compute_v1.types.Int64RangeMatch): The header value must be an integer and its value must be in the range specified in rangeMatch. If the header does not @@ -17177,6 +19235,8 @@ class HttpHeaderMatch(proto.Message): regexMatch, presentMatch or rangeMatch must be set. Note that rangeMatch is not supported for Loadbalancers that have their loadBalancingScheme set to EXTERNAL. + + This field is a member of `oneof`_ ``_range_match``. regex_match (str): The value of the header must match the regular expression specified in regexMatch. For regular expression grammar, @@ -17188,11 +19248,15 @@ class HttpHeaderMatch(proto.Message): regexMatch, presentMatch or rangeMatch must be set. Note that regexMatch only applies to Loadbalancers that have their loadBalancingScheme set to INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_regex_match``. suffix_match (str): The value of the header must end with the contents of suffixMatch. Only one of exactMatch, prefixMatch, suffixMatch, regexMatch, presentMatch or rangeMatch must be set. + + This field is a member of `oneof`_ ``_suffix_match``. """ exact_match = proto.Field(proto.STRING, number=457641093, optional=True,) @@ -17214,14 +19278,20 @@ class HttpHeaderOption(proto.Message): Attributes: header_name (str): The name of the header. + + This field is a member of `oneof`_ ``_header_name``. header_value (str): The value of the header to add. + + This field is a member of `oneof`_ ``_header_value``. replace (bool): If false, headerValue is appended to any values that already exist for the header. If true, headerValue is set for the header, discarding any values that were set for that header. The default value is false. + + This field is a member of `oneof`_ ``_replace``. """ header_name = proto.Field(proto.STRING, number=110223613, optional=True,) @@ -17231,22 +19301,29 @@ class HttpHeaderOption(proto.Message): class HttpQueryParameterMatch(proto.Message): r"""HttpRouteRuleMatch criteria for a request's query parameter. + Attributes: exact_match (str): The queryParameterMatch matches if the value of the parameter exactly matches the contents of exactMatch. Only one of presentMatch, exactMatch or regexMatch must be set. + + This field is a member of `oneof`_ ``_exact_match``. name (str): The name of the query parameter to match. The query parameter must exist in the request, in the absence of which the request match fails. + + This field is a member of `oneof`_ ``_name``. present_match (bool): Specifies that the queryParameterMatch matches if the request contains the query parameter, irrespective of whether the parameter has a value or not. Only one of presentMatch, exactMatch or regexMatch must be set. + + This field is a member of `oneof`_ ``_present_match``. regex_match (str): The queryParameterMatch matches if the value of the parameter matches the regular expression specified by @@ -17255,6 +19332,8 @@ class HttpQueryParameterMatch(proto.Message): exactMatch or regexMatch must be set. Note that regexMatch only applies when the loadBalancingScheme is set to INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_regex_match``. 
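Editor's note: HttpHeaderMatch and HttpQueryParameterMatch above each require exactly one match criterion to be set. A hedged sketch building one of each, using only field names from their docstrings; the header and parameter names are illustrative::

    from google.cloud.compute_v1.types import compute

    header_match = compute.HttpHeaderMatch(
        header_name="x-example-env",  # hypothetical header
        prefix_match="prod-",         # exactly one *Match criterion is set
    )

    query_match = compute.HttpQueryParameterMatch(
        name="debug",                 # the parameter must exist in the request
        present_match=True,           # match regardless of the parameter's value
    )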
""" exact_match = proto.Field(proto.STRING, number=457641093, optional=True,) @@ -17265,12 +19344,15 @@ class HttpQueryParameterMatch(proto.Message): class HttpRedirectAction(proto.Message): r"""Specifies settings for an HTTP redirect. + Attributes: host_redirect (str): The host that will be used in the redirect response instead of the one that was supplied in the request. The value must be between 1 and 255 characters. + + This field is a member of `oneof`_ ``_host_redirect``. https_redirect (bool): If set to true, the URL scheme in the redirected request is set to https. If set to @@ -17280,6 +19362,8 @@ class HttpRedirectAction(proto.Message): TargetHttpProxys. Setting this true for TargetHttpsProxy is not permitted. The default is set to false. + + This field is a member of `oneof`_ ``_https_redirect``. path_redirect (str): The path that will be used in the redirect response instead of the one that was supplied in @@ -17289,6 +19373,8 @@ class HttpRedirectAction(proto.Message): the original request will be used for the redirect. The value must be between 1 and 1024 characters. + + This field is a member of `oneof`_ ``_path_redirect``. prefix_redirect (str): The prefix that replaces the prefixMatch specified in the HttpRouteRuleMatch, retaining @@ -17299,6 +19385,8 @@ class HttpRedirectAction(proto.Message): the path of the original request will be used for the redirect. The value must be between 1 and 1024 characters. + + This field is a member of `oneof`_ ``_prefix_redirect``. redirect_response_code (google.cloud.compute_v1.types.HttpRedirectAction.RedirectResponseCode): The HTTP Status code to use for this RedirectAction. Supported values are: - MOVED_PERMANENTLY_DEFAULT, which is @@ -17308,12 +19396,16 @@ class HttpRedirectAction(proto.Message): the request method will be retained. - PERMANENT_REDIRECT, which corresponds to 308. In this case, the request method will be retained. + + This field is a member of `oneof`_ ``_redirect_response_code``. strip_query (bool): If set to true, any accompanying query portion of the original URL is removed prior to redirecting the request. If set to false, the query portion of the original URL is retained. The default is set to false. + + This field is a member of `oneof`_ ``_strip_query``. """ class RedirectResponseCode(proto.Enum): @@ -17344,11 +19436,14 @@ class RedirectResponseCode(proto.Enum): class HttpRetryPolicy(proto.Message): r"""The retry policy associates with HttpRouteRule + Attributes: num_retries (int): Specifies the allowed number retries. This number must be > 0. If not specified, defaults to 1. + + This field is a member of `oneof`_ ``_num_retries``. per_try_timeout (google.cloud.compute_v1.types.Duration): Specifies a non-zero timeout per retry attempt. If not specified, will use the timeout @@ -17356,6 +19451,8 @@ class HttpRetryPolicy(proto.Message): HttpRouteAction is not set, will use the largest timeout among all backend services associated with the route. + + This field is a member of `oneof`_ ``_per_try_timeout``. retry_conditions (Sequence[str]): Specifies one or more conditions when this retry policy applies. 
Valid values are: - 5xx: Retry will be attempted if @@ -17394,6 +19491,7 @@ class HttpRetryPolicy(proto.Message): class HttpRouteAction(proto.Message): r""" + Attributes: cors_policy (google.cloud.compute_v1.types.CorsPolicy): The specification for allowing client side @@ -17401,6 +19499,8 @@ class HttpRouteAction(proto.Message): Recommendation for Cross Origin Resource Sharing Not supported when the URL map is bound to target gRPC proxy. + + This field is a member of `oneof`_ ``_cors_policy``. fault_injection_policy (google.cloud.compute_v1.types.HttpFaultInjection): The specification for fault injection introduced into traffic to test the resiliency of clients to backend service @@ -17412,6 +19512,8 @@ class HttpRouteAction(proto.Message): percentage of requests. For the requests impacted by fault injection, timeout and retry_policy will be ignored by clients that are configured with a fault_injection_policy. + + This field is a member of `oneof`_ ``_fault_injection_policy``. max_stream_duration (google.cloud.compute_v1.types.Duration): Specifies the maximum duration (timeout) for streams on the selected route. Unlike the timeout field where the timeout @@ -17425,6 +19527,8 @@ class HttpRouteAction(proto.Message): the route. This field is only allowed if the Url map is used with backend services with loadBalancingScheme set to INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_max_stream_duration``. request_mirror_policy (google.cloud.compute_v1.types.RequestMirrorPolicy): Specifies the policy on how requests intended for the route's backends are shadowed to a @@ -17435,11 +19539,15 @@ class HttpRouteAction(proto.Message): with -shadow. Not supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_request_mirror_policy``. retry_policy (google.cloud.compute_v1.types.HttpRetryPolicy): Specifies the retry policy associated with this route. Not supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_retry_policy``. timeout (google.cloud.compute_v1.types.Duration): Specifies the timeout for the selected route. Timeout is computed from the time the request @@ -17451,6 +19559,8 @@ class HttpRouteAction(proto.Message): Not supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_timeout``. url_rewrite (google.cloud.compute_v1.types.UrlRewrite): The spec to modify the URL of the request, prior to forwarding the request to the matched @@ -17459,6 +19569,8 @@ class HttpRouteAction(proto.Message): Not supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_url_rewrite``. weighted_backend_services (Sequence[google.cloud.compute_v1.types.WeightedBackendService]): A list of weighted backend services to send traffic to when a route match occurs. The @@ -17511,6 +19623,8 @@ class HttpRouteRule(proto.Message): The short description conveying the intent of this routeRule. The description can have a maximum length of 1024 characters. + + This field is a member of `oneof`_ ``_description``. header_action (google.cloud.compute_v1.types.HttpHeaderAction): Specifies changes to request and response headers that need to take effect for the selected backendService. 
The @@ -17521,6 +19635,8 @@ class HttpRouteRule(proto.Message): that have their loadBalancingScheme set to EXTERNAL. Not supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_header_action``. match_rules (Sequence[google.cloud.compute_v1.types.HttpRouteRuleMatch]): The list of criteria for matching attributes of a request to this routeRule. This list has OR @@ -17548,6 +19664,8 @@ class HttpRouteRule(proto.Message): could add rules numbered from 6 to 8, 10 to 11, and 13 to 15 in the future without any impact on existing rules. + + This field is a member of `oneof`_ ``_priority``. route_action (google.cloud.compute_v1.types.HttpRouteAction): In response to a matching matchRule, the load balancer performs advanced routing actions like @@ -17562,6 +19680,8 @@ class HttpRouteRule(proto.Message): UrlMaps for external HTTP(S) load balancers support only the urlRewrite action within a routeRule's routeAction. + + This field is a member of `oneof`_ ``_route_action``. service (str): The full or partial URL of the backend service resource to which traffic is directed if @@ -17575,12 +19695,16 @@ class HttpRouteRule(proto.Message): weightedBackendServices, service must not be specified. Only one of urlRedirect, service or routeAction.weightedBackendService must be set. + + This field is a member of `oneof`_ ``_service``. url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): When this rule is matched, the request is redirected to a URL specified by urlRedirect. If urlRedirect is specified, service or routeAction must not be set. Not supported when the URL map is bound to target gRPC proxy. + + This field is a member of `oneof`_ ``_url_redirect``. """ description = proto.Field(proto.STRING, number=422937596, optional=True,) @@ -17614,6 +19738,8 @@ class HttpRouteRuleMatch(proto.Message): the original URL. fullPathMatch must be between 1 and 1024 characters. Only one of prefixMatch, fullPathMatch or regexMatch must be specified. + + This field is a member of `oneof`_ ``_full_path_match``. header_matches (Sequence[google.cloud.compute_v1.types.HttpHeaderMatch]): Specifies a list of header match criteria, all of which must match corresponding headers in @@ -17624,6 +19750,8 @@ class HttpRouteRuleMatch(proto.Message): false. ignoreCase must not be used with regexMatch. Not supported when the URL map is bound to target gRPC proxy. + + This field is a member of `oneof`_ ``_ignore_case``. metadata_filters (Sequence[google.cloud.compute_v1.types.MetadataFilter]): Opaque filter criteria used by Loadbalancer to restrict routing configuration to a limited set of xDS compliant @@ -17652,6 +19780,8 @@ class HttpRouteRuleMatch(proto.Message): The value must be between 1 and 1024 characters. Only one of prefixMatch, fullPathMatch or regexMatch must be specified. + + This field is a member of `oneof`_ ``_prefix_match``. query_parameter_matches (Sequence[google.cloud.compute_v1.types.HttpQueryParameterMatch]): Specifies a list of query parameter match criteria, all of which must match corresponding @@ -17667,6 +19797,8 @@ class HttpRouteRuleMatch(proto.Message): specified. Note that regexMatch only applies to Loadbalancers that have their loadBalancingScheme set to INTERNAL_SELF_MANAGED. + + This field is a member of `oneof`_ ``_regex_match``. 
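Putting the rule and match messages together, a hedged example of one ``HttpRouteRule`` that matches ``/api/`` requests by prefix plus a header, and sends them to a dedicated backend service (all names are placeholders):

```python
from google.cloud import compute_v1

api_rule = compute_v1.HttpRouteRule(
    priority=1,                      # lower numbers are evaluated first
    description="API traffic",
    match_rules=[
        compute_v1.HttpRouteRuleMatch(
            prefix_match="/api/",
            ignore_case=False,
            header_matches=[
                compute_v1.HttpHeaderMatch(header_name="x-env", exact_match="prod"),
            ],
        ),
    ],
    # Exactly one of service, url_redirect, or routeAction.weightedBackendServices may be set.
    service="projects/my-project/global/backendServices/api-backend",
)
```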
""" full_path_match = proto.Field(proto.STRING, number=214598875, optional=True,) @@ -17693,18 +19825,28 @@ class Image(proto.Message): archive_size_bytes (int): Size of the image tar.gz archive stored in Google Cloud Storage (in bytes). + + This field is a member of `oneof`_ ``_archive_size_bytes``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. deprecated (google.cloud.compute_v1.types.DeprecationStatus): The deprecation status associated with this image. + + This field is a member of `oneof`_ ``_deprecated``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. disk_size_gb (int): Size of the image when restored onto a persistent disk (in GB). + + This field is a member of `oneof`_ ``_disk_size_gb``. family (str): The name of the image family to which this image belongs. You can create disks by @@ -17712,6 +19854,8 @@ class Image(proto.Message): image name. The image family always returns its latest image that is not deprecated. The name of the image family must comply with RFC1035. + + This field is a member of `oneof`_ ``_family``. guest_os_features (Sequence[google.cloud.compute_v1.types.GuestOsFeature]): A list of features to enable on the guest operating system. Applicable only for bootable @@ -17720,6 +19864,8 @@ class Image(proto.Message): id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): Encrypts the image using a customer-supplied encryption key. After you encrypt an image with @@ -17732,9 +19878,13 @@ class Image(proto.Message): disk will be encrypted using an automatically generated key and you do not need to provide a key to use the image later. + + This field is a member of `oneof`_ ``_image_encryption_key``. kind (str): [Output Only] Type of the resource. Always compute#image for images. + + This field is a member of `oneof`_ ``_kind``. label_fingerprint (str): A fingerprint for the labels being applied to this image, which is essentially a hash of the @@ -17747,6 +19897,8 @@ class Image(proto.Message): with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve an image. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.Image.LabelsEntry]): Labels to apply to this image. These can be later modified by the setLabels method. @@ -17764,15 +19916,25 @@ class Image(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. raw_disk (google.cloud.compute_v1.types.RawDisk): The parameters of the raw disk image. + + This field is a member of `oneof`_ ``_raw_disk``. satisfies_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. shielded_instance_initial_state (google.cloud.compute_v1.types.InitialStateConfig): Set the secure boot keys of shielded instance. + + This field is a member of `oneof`_ ``_shielded_instance_initial_state``. 
source_disk (str): URL of the source disk used to create this image. For example, the following are valid @@ -17785,15 +19947,21 @@ class Image(proto.Message): of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL + + This field is a member of `oneof`_ ``_source_disk``. source_disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_disk_encryption_key``. source_disk_id (str): [Output Only] The ID value of the disk used to create this image. This value may be used to determine whether the image was taken from the current or a previous instance of a given disk name. + + This field is a member of `oneof`_ ``_source_disk_id``. source_image (str): URL of the source image used to create this image. The following are valid formats for the URL: - @@ -17804,15 +19972,21 @@ class Image(proto.Message): one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL + + This field is a member of `oneof`_ ``_source_image``. source_image_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source image. Required if the source image is protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_image_encryption_key``. source_image_id (str): [Output Only] The ID value of the image used to create this image. This value may be used to determine whether the image was taken from the current or a previous instance of a given image name. + + This field is a member of `oneof`_ ``_source_image_id``. source_snapshot (str): URL of the source snapshot used to create this image. The following are valid formats for the URL: - @@ -17823,24 +19997,34 @@ class Image(proto.Message): of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL + + This field is a member of `oneof`_ ``_source_snapshot``. source_snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source snapshot. Required if the source snapshot is protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. source_snapshot_id (str): [Output Only] The ID value of the snapshot used to create this image. This value may be used to determine whether the snapshot was taken from the current or a previous instance of a given snapshot name. + + This field is a member of `oneof`_ ``_source_snapshot_id``. source_type (google.cloud.compute_v1.types.Image.SourceType): The type of the image used to create this disk. The default and only value is RAW + + This field is a member of `oneof`_ ``_source_type``. status (google.cloud.compute_v1.types.Image.Status): [Output Only] The status of the image. An image can be used to create other resources, such as instances, only after the image has been successfully created and the status is set to READY. Possible values are FAILED, PENDING, or READY. + + This field is a member of `oneof`_ ``_status``. storage_locations (Sequence[str]): Cloud Storage bucket storage location of the image (regional or multi-regional). 
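As a sketch of the ``source_disk`` path described above, an ``Image`` built from an existing persistent disk and inserted with the generated client; project, zone, and resource names are placeholders, and the returned ``Operation`` still has to be polled before the image is usable:

```python
from google.cloud import compute_v1

image = compute_v1.Image(
    name="my-app-image",                 # placeholder
    family="my-app",                     # later images in this family supersede this one
    source_disk="projects/my-project/zones/us-central1-a/disks/my-disk",
    storage_locations=["us"],
)
operation = compute_v1.ImagesClient().insert(
    project="my-project", image_resource=image
)
# `operation` is a compute_v1.Operation; wait for it (e.g. via GlobalOperationsClient)
# before referencing the new image.
```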
@@ -17918,11 +20102,14 @@ class Status(proto.Enum): class ImageFamilyView(proto.Message): r""" + Attributes: image (google.cloud.compute_v1.types.Image): The latest image that is part of the specified image family in the requested location, and that is not deprecated. + + This field is a member of `oneof`_ ``_image``. """ image = proto.Field( @@ -17932,14 +20119,19 @@ class ImageFamilyView(proto.Message): class ImageList(proto.Message): r"""Contains a list of images. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Image]): A list of Image resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -17947,10 +20139,16 @@ class ImageList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -17980,6 +20178,8 @@ class InitialStateConfig(proto.Message): The Key Exchange Key (KEK). pk (google.cloud.compute_v1.types.FileContentBuffer): The Platform Key (PK). + + This field is a member of `oneof`_ ``_pk``. """ dbs = proto.RepeatedField(proto.MESSAGE, number=99253, message="FileContentBuffer",) @@ -18021,6 +20221,8 @@ class InsertAddressRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ address_resource = proto.Field(proto.MESSAGE, number=483888121, message="Address",) @@ -18054,6 +20256,8 @@ class InsertAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): Name of the zone for this request. """ @@ -18091,6 +20295,8 @@ class InsertBackendBucketRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_bucket_resource = proto.Field( @@ -18125,6 +20331,8 @@ class InsertBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service_resource = proto.Field( @@ -18159,9 +20367,13 @@ class InsertDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. source_image (str): Source image to restore onto a disk. This field is optional. + + This field is a member of `oneof`_ ``_source_image``. zone (str): The name of the zone for this request. 
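Every insert request above carries the same optional ``request_id`` field; a minimal sketch of using it to make a disk insert safely retryable (names and sizes below are placeholders):

```python
import uuid
from google.cloud import compute_v1

disk = compute_v1.Disk(
    name="data-disk-1",                           # placeholder
    size_gb=100,
    type_="zones/us-central1-a/diskTypes/pd-ssd",
)
request = compute_v1.InsertDiskRequest(
    project="my-project",
    zone="us-central1-a",
    disk_resource=disk,
    request_id=str(uuid.uuid4()),  # retries with the same UUID will not create a second disk
)
operation = compute_v1.DisksClient().insert(request=request)
```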
""" @@ -18198,6 +20410,8 @@ class InsertExternalVpnGatewayRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ external_vpn_gateway_resource = proto.Field( @@ -18219,6 +20433,8 @@ class InsertFirewallPolicyRequest(proto.Message): "folders/[FOLDER_ID]" if the parent is a folder or "organizations/[ORGANIZATION_ID]" if the parent is an organization. + + This field is a member of `oneof`_ ``_parent_id``. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -18235,6 +20451,8 @@ class InsertFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy_resource = proto.Field( @@ -18269,6 +20487,8 @@ class InsertFirewallRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_resource = proto.Field(proto.MESSAGE, number=41425005, message="Firewall",) @@ -18303,6 +20523,8 @@ class InsertForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ forwarding_rule_resource = proto.Field( @@ -18338,6 +20560,8 @@ class InsertGlobalAddressRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ address_resource = proto.Field(proto.MESSAGE, number=483888121, message="Address",) @@ -18370,6 +20594,8 @@ class InsertGlobalForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ forwarding_rule_resource = proto.Field( @@ -18404,6 +20630,8 @@ class InsertGlobalNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network_endpoint_group_resource = proto.Field( @@ -18436,6 +20664,8 @@ class InsertGlobalPublicDelegatedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -18470,6 +20700,8 @@ class InsertHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check_resource = proto.Field( @@ -18486,6 +20718,8 @@ class InsertImageRequest(proto.Message): Attributes: force_create (bool): Force image creation if true. + + This field is a member of `oneof`_ ``_force_create``. 
image_resource (google.cloud.compute_v1.types.Image): The body resource for this request project (str): @@ -18506,6 +20740,8 @@ class InsertImageRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ force_create = proto.Field(proto.BOOL, number=197723344, optional=True,) @@ -18539,6 +20775,8 @@ class InsertInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where you want to create the managed instance group. @@ -18577,6 +20815,8 @@ class InsertInstanceGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where you want to create the instance group. @@ -18615,6 +20855,8 @@ class InsertInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. source_instance_template (str): Specifies instance template to create the instance. This field is optional. It can be a @@ -18624,6 +20866,8 @@ class InsertInstanceRequest(proto.Message): /global/instanceTemplates/instanceTemplate - projects/project/global/instanceTemplates/instanceTemplate - global/instanceTemplates/instanceTemplate + + This field is a member of `oneof`_ ``_source_instance_template``. zone (str): The name of the zone for this request. """ @@ -18664,6 +20908,8 @@ class InsertInstanceTemplateRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_template_resource = proto.Field( @@ -18700,8 +20946,12 @@ class InsertInterconnectAttachmentRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. validate_only (bool): If true, the request will not be committed. + + This field is a member of `oneof`_ ``_validate_only``. """ interconnect_attachment_resource = proto.Field( @@ -18738,6 +20988,8 @@ class InsertInterconnectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ interconnect_resource = proto.Field( @@ -18772,6 +21024,8 @@ class InsertLicenseRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ license_resource = proto.Field(proto.MESSAGE, number=437955148, message="License",) @@ -18804,6 +21058,8 @@ class InsertNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where you want to create the network endpoint group. 
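The ``source_instance_template`` field above allows a VM to be created from an existing template; a minimal sketch, with placeholder project, zone, template, and instance names. Fields set on ``instance_resource`` override the corresponding template fields:

```python
from google.cloud import compute_v1

request = compute_v1.InsertInstanceRequest(
    project="my-project",
    zone="us-central1-a",
    source_instance_template="global/instanceTemplates/web-template",
    instance_resource=compute_v1.Instance(name="web-1"),  # only the name overrides the template
)
operation = compute_v1.InstancesClient().insert(request=request)
```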
It should comply @@ -18843,6 +21099,8 @@ class InsertNetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network_resource = proto.Field(proto.MESSAGE, number=122105599, message="Network",) @@ -18877,6 +21135,8 @@ class InsertNodeGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -18917,6 +21177,8 @@ class InsertNodeTemplateRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ node_template_resource = proto.Field( @@ -18954,6 +21216,8 @@ class InsertPacketMirroringRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ packet_mirroring_resource = proto.Field( @@ -18987,6 +21251,8 @@ class InsertPublicAdvertisedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -19021,6 +21287,8 @@ class InsertPublicDelegatedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -19058,6 +21326,8 @@ class InsertRegionAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ autoscaler_resource = proto.Field( @@ -19095,6 +21365,8 @@ class InsertRegionBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service_resource = proto.Field( @@ -19132,6 +21404,8 @@ class InsertRegionCommitmentRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ commitment_resource = proto.Field( @@ -19169,9 +21443,13 @@ class InsertRegionDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. source_image (str): Source image to restore onto a disk. This field is optional. + + This field is a member of `oneof`_ ``_source_image``. """ disk_resource = proto.Field(proto.MESSAGE, number=25880688, message="Disk",) @@ -19208,6 +21486,8 @@ class InsertRegionHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
""" health_check_resource = proto.Field( @@ -19245,6 +21525,8 @@ class InsertRegionHealthCheckServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check_service_resource = proto.Field( @@ -19282,6 +21564,8 @@ class InsertRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager_resource = proto.Field( @@ -19321,6 +21605,8 @@ class InsertRegionNetworkEndpointGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network_endpoint_group_resource = proto.Field( @@ -19356,6 +21642,8 @@ class InsertRegionNotificationEndpointRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ notification_endpoint_resource = proto.Field( @@ -19389,6 +21677,8 @@ class InsertRegionSslCertificateRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): The body resource for this request """ @@ -19426,6 +21716,8 @@ class InsertRegionTargetHttpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): The body resource for this request """ @@ -19461,6 +21753,8 @@ class InsertRegionTargetHttpsProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): The body resource for this request """ @@ -19485,6 +21779,8 @@ class InsertRegionUrlMapRequest(proto.Message): request_id (str): begin_interface: MixerMutationRequestBuilder Request ID to support idempotency. + + This field is a member of `oneof`_ ``_request_id``. url_map_resource (google.cloud.compute_v1.types.UrlMap): The body resource for this request """ @@ -19518,6 +21814,8 @@ class InsertReservationRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. reservation_resource (google.cloud.compute_v1.types.Reservation): The body resource for this request zone (str): @@ -19557,6 +21855,8 @@ class InsertResourcePolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
resource_policy_resource (google.cloud.compute_v1.types.ResourcePolicy): The body resource for this request """ @@ -19592,6 +21892,8 @@ class InsertRouteRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. route_resource (google.cloud.compute_v1.types.Route): The body resource for this request """ @@ -19626,6 +21928,8 @@ class InsertRouterRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. router_resource (google.cloud.compute_v1.types.Router): The body resource for this request """ @@ -19659,6 +21963,8 @@ class InsertSecurityPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): The body resource for this request """ @@ -19693,6 +21999,8 @@ class InsertServiceAttachmentRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. service_attachment_resource (google.cloud.compute_v1.types.ServiceAttachment): The body resource for this request """ @@ -19726,6 +22034,8 @@ class InsertSslCertificateRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): The body resource for this request """ @@ -19760,6 +22070,8 @@ class InsertSslPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): The body resource for this request """ @@ -19796,6 +22108,8 @@ class InsertSubnetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): The body resource for this request """ @@ -19829,6 +22143,8 @@ class InsertTargetGrpcProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_grpc_proxy_resource (google.cloud.compute_v1.types.TargetGrpcProxy): The body resource for this request """ @@ -19863,6 +22179,8 @@ class InsertTargetHttpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_http_proxy_resource (google.cloud.compute_v1.types.TargetHttpProxy): The body resource for this request """ @@ -19897,6 +22215,8 @@ class InsertTargetHttpsProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
target_https_proxy_resource (google.cloud.compute_v1.types.TargetHttpsProxy): The body resource for this request """ @@ -19931,6 +22251,8 @@ class InsertTargetInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_instance_resource (google.cloud.compute_v1.types.TargetInstance): The body resource for this request zone (str): @@ -19970,6 +22292,8 @@ class InsertTargetPoolRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_pool_resource (google.cloud.compute_v1.types.TargetPool): The body resource for this request """ @@ -20005,6 +22329,8 @@ class InsertTargetSslProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_ssl_proxy_resource (google.cloud.compute_v1.types.TargetSslProxy): The body resource for this request """ @@ -20039,6 +22365,8 @@ class InsertTargetTcpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_tcp_proxy_resource (google.cloud.compute_v1.types.TargetTcpProxy): The body resource for this request """ @@ -20075,6 +22403,8 @@ class InsertTargetVpnGatewayRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_vpn_gateway_resource (google.cloud.compute_v1.types.TargetVpnGateway): The body resource for this request """ @@ -20110,6 +22440,8 @@ class InsertUrlMapRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. url_map_resource (google.cloud.compute_v1.types.UrlMap): The body resource for this request """ @@ -20144,6 +22476,8 @@ class InsertVpnGatewayRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. vpn_gateway_resource (google.cloud.compute_v1.types.VpnGateway): The body resource for this request """ @@ -20181,6 +22515,8 @@ class InsertVpnTunnelRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. vpn_tunnel_resource (google.cloud.compute_v1.types.VpnTunnel): The body resource for this request """ @@ -20202,31 +22538,46 @@ class Instance(proto.Message): advanced_machine_features (google.cloud.compute_v1.types.AdvancedMachineFeatures): Controls for advanced machine-related behavior features. + + This field is a member of `oneof`_ ``_advanced_machine_features``. can_ip_forward (bool): Allows this instance to send and receive packets with non-matching destination or source IPs. This is required if you plan to use this instance to forward routes. For more information, see Enabling IP Forwarding . + + This field is a member of `oneof`_ ``_can_ip_forward``. 
confidential_instance_config (google.cloud.compute_v1.types.ConfidentialInstanceConfig): + This field is a member of `oneof`_ ``_confidential_instance_config``. cpu_platform (str): [Output Only] The CPU platform used by this instance. + + This field is a member of `oneof`_ ``_cpu_platform``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. deletion_protection (bool): Whether the resource should be protected against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. disks (Sequence[google.cloud.compute_v1.types.AttachedDisk]): Array of disks associated with this instance. Persistent disks must be created before you can assign them. display_device (google.cloud.compute_v1.types.DisplayDevice): Enables display device for the instance. + + This field is a member of `oneof`_ ``_display_device``. fingerprint (str): Specifies a fingerprint for this resource, which is essentially a hash of the instance's @@ -20237,6 +22588,8 @@ class Instance(proto.Message): an up-to-date fingerprint hash in order to update the instance. To see the latest fingerprint, make get() request to the instance. + + This field is a member of `oneof`_ ``_fingerprint``. guest_accelerators (Sequence[google.cloud.compute_v1.types.AcceleratorConfig]): A list of the type and count of accelerator cards attached to the instance. @@ -20248,12 +22601,18 @@ class Instance(proto.Message): global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS. + + This field is a member of `oneof`_ ``_hostname``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#instance for instances. + + This field is a member of `oneof`_ ``_kind``. label_fingerprint (str): A fingerprint for this request, which is essentially a hash of the label's contents and @@ -20264,16 +22623,24 @@ class Instance(proto.Message): fingerprint hash in order to update or change labels. To see the latest fingerprint, make get() request to the instance. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.Instance.LabelsEntry]): Labels to apply to this instance. These can be later modified by the setLabels method. last_start_timestamp (str): [Output Only] Last start timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_start_timestamp``. last_stop_timestamp (str): [Output Only] Last stop timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_stop_timestamp``. last_suspended_timestamp (str): [Output Only] Last suspended timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_suspended_timestamp``. machine_type (str): Full or partial URL of the machine type resource to use for this instance, in the @@ -20294,16 +22661,22 @@ class Instance(proto.Message): central1-f/machineTypes/custom-4-5120 For a full list of restrictions, read the Specifications for custom machine types. + + This field is a member of `oneof`_ ``_machine_type``. metadata (google.cloud.compute_v1.types.Metadata): The metadata key/value pairs assigned to this instance. 
This includes custom metadata and predefined keys. + + This field is a member of `oneof`_ ``_metadata``. min_cpu_platform (str): Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy Bridge". + + This field is a member of `oneof`_ ``_min_cpu_platform``. name (str): The name of the resource, provided by the client when initially creating the resource. The resource name must be @@ -20313,6 +22686,8 @@ class Instance(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network_interfaces (Sequence[google.cloud.compute_v1.types.NetworkInterface]): An array of network configurations for this instance. These specify how interfaces are @@ -20322,18 +22697,28 @@ class Instance(proto.Message): private_ipv6_google_access (google.cloud.compute_v1.types.Instance.PrivateIpv6GoogleAccess): The private IPv6 google access type for the VM. If not specified, use INHERIT_FROM_SUBNETWORK as default. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. reservation_affinity (google.cloud.compute_v1.types.ReservationAffinity): Specifies the reservations that this instance can consume from. + + This field is a member of `oneof`_ ``_reservation_affinity``. resource_policies (Sequence[str]): Resource policies applied to this instance. satisfies_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. scheduling (google.cloud.compute_v1.types.Scheduling): Sets the scheduling options for this instance. + + This field is a member of `oneof`_ ``_scheduling``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. service_accounts (Sequence[google.cloud.compute_v1.types.ServiceAccount]): A list of service accounts, with their specified scopes, authorized for this instance. @@ -20345,20 +22730,28 @@ class Instance(proto.Message): information. shielded_instance_config (google.cloud.compute_v1.types.ShieldedInstanceConfig): + This field is a member of `oneof`_ ``_shielded_instance_config``. shielded_instance_integrity_policy (google.cloud.compute_v1.types.ShieldedInstanceIntegrityPolicy): + This field is a member of `oneof`_ ``_shielded_instance_integrity_policy``. start_restricted (bool): [Output Only] Whether a VM has been restricted for start because Compute Engine has detected suspicious activity. + + This field is a member of `oneof`_ ``_start_restricted``. status (google.cloud.compute_v1.types.Instance.Status): [Output Only] The status of the instance. One of the following values: PROVISIONING, STAGING, RUNNING, STOPPING, SUSPENDING, SUSPENDED, REPAIRING, and TERMINATED. For more information about the status of the instance, see Instance life cycle. + + This field is a member of `oneof`_ ``_status``. status_message (str): [Output Only] An optional, human-readable explanation of the status. + + This field is a member of `oneof`_ ``_status_message``. tags (google.cloud.compute_v1.types.Tags): Tags to apply to this instance. Tags are used to identify valid sources or targets for network @@ -20367,10 +22760,14 @@ class Instance(proto.Message): modified by the setTags method. Each tag within the list must comply with RFC1035. 
Multiple tags can be specified via the 'tags.items' field. + + This field is a member of `oneof`_ ``_tags``. zone (str): [Output Only] URL of the zone where the instance resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. """ class PrivateIpv6GoogleAccess(proto.Enum): @@ -20477,10 +22874,13 @@ class Status(proto.Enum): class InstanceAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceAggregatedList.ItemsEntry]): An object that contains a list of instances scoped by zone. @@ -20488,6 +22888,8 @@ class InstanceAggregatedList(proto.Message): [Output Only] Type of resource. Always compute#instanceAggregatedList for aggregated lists of Instance resources. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -20495,12 +22897,18 @@ class InstanceAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -20535,24 +22943,36 @@ class InstanceGroup(proto.Message): creation_timestamp (str): [Output Only] The creation timestamp for this instance group in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): [Output Only] The fingerprint of the named ports. The system uses this fingerprint to detect conflicts when multiple users change the named ports concurrently. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] A unique identifier for this instance group, generated by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] The resource type, which is always compute#instanceGroup for instance groups. + + This field is a member of `oneof`_ ``_kind``. name (str): The name of the instance group. The name must be 1-63 characters long, and comply with RFC1035. + + This field is a member of `oneof`_ ``_name``. named_ports (Sequence[google.cloud.compute_v1.types.NamedPort]): Assigns a name to a port number. For example: {name: "http", port: 80} This allows the system to reference ports by the @@ -20566,24 +22986,36 @@ class InstanceGroup(proto.Message): network interfaces, then the network and subnetwork fields only refer to the network and subnet used by your primary interface (nic0). + + This field is a member of `oneof`_ ``_network``. region (str): [Output Only] The URL of the region where the instance group is located (for regional resources). + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] The URL for this instance group. 
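Pulling the ``Instance`` fields above together, a minimal sketch of building a VM resource from scratch (boot disk, one NIC, labels, and tags) and inserting it; all names, the machine type, and the source image are placeholders:

```python
from google.cloud import compute_v1

boot_disk = compute_v1.AttachedDisk(
    boot=True,
    auto_delete=True,
    initialize_params=compute_v1.AttachedDiskInitializeParams(
        source_image="projects/debian-cloud/global/images/family/debian-11",
        disk_size_gb=10,
    ),
)
instance = compute_v1.Instance(
    name="worker-1",                                         # placeholder
    machine_type="zones/us-central1-a/machineTypes/e2-small",
    disks=[boot_disk],
    network_interfaces=[compute_v1.NetworkInterface(network="global/networks/default")],
    labels={"env": "dev"},
    tags=compute_v1.Tags(items=["http-server"]),
)
operation = compute_v1.InstancesClient().insert(
    project="my-project", zone="us-central1-a", instance_resource=instance
)
```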
The server generates this URL. + + This field is a member of `oneof`_ ``_self_link``. size (int): [Output Only] The total number of instances in the instance group. + + This field is a member of `oneof`_ ``_size``. subnetwork (str): [Output Only] The URL of the subnetwork to which all instances in the instance group belong. If your instance has multiple network interfaces, then the network and subnetwork fields only refer to the network and subnet used by your primary interface (nic0). + + This field is a member of `oneof`_ ``_subnetwork``. zone (str): [Output Only] The URL of the zone where the instance group is located (for zonal resources). + + This field is a member of `oneof`_ ``_zone``. """ creation_timestamp = proto.Field(proto.STRING, number=30525366, optional=True,) @@ -20605,16 +23037,21 @@ class InstanceGroup(proto.Message): class InstanceGroupAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceGroupAggregatedList.ItemsEntry]): A list of InstanceGroupsScopedList resources. kind (str): [Output Only] The resource type, which is always compute#instanceGroupAggregatedList for aggregated lists of instance groups. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -20622,12 +23059,18 @@ class InstanceGroupAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -20652,15 +23095,20 @@ def raw_page(self): class InstanceGroupList(proto.Message): r"""A list of InstanceGroup resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceGroup]): A list of InstanceGroup resources. kind (str): [Output Only] The resource type, which is always compute#instanceGroupList for instance group lists. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -20668,10 +23116,16 @@ class InstanceGroupList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -20709,19 +23163,29 @@ class InstanceGroupManager(proto.Message): and a random four-character string to the base instance name. 
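The list and aggregated-list messages above are consumed through pagers that follow ``next_page_token`` automatically; a small sketch, assuming a placeholder project and zone:

```python
from google.cloud import compute_v1

groups_client = compute_v1.InstanceGroupsClient()

# Zonal list: the pager fetches subsequent pages transparently.
for group in groups_client.list(project="my-project", zone="us-central1-a"):
    print(group.name, group.size)

# Aggregated list: yields (scope, InstanceGroupsScopedList) pairs across all zones.
for scope, scoped in groups_client.aggregated_list(project="my-project"):
    for group in scoped.instance_groups:
        print(scope, group.name)
```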
The base instance name must comply with RFC1035. + + This field is a member of `oneof`_ ``_base_instance_name``. creation_timestamp (str): [Output Only] The creation timestamp for this managed instance group in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. current_actions (google.cloud.compute_v1.types.InstanceGroupManagerActionsSummary): [Output Only] The list of instance actions and the number of instances in this managed instance group that are scheduled for each of those actions. + + This field is a member of `oneof`_ ``_current_actions``. description (str): An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. distribution_policy (google.cloud.compute_v1.types.DistributionPolicy): Policy specifying the intended distribution of managed instances across zones in a regional managed instance group. + + This field is a member of `oneof`_ ``_distribution_policy``. fingerprint (str): Fingerprint of this resource. This field may be used in optimistic locking. It will be @@ -20732,11 +23196,17 @@ class InstanceGroupManager(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve an InstanceGroupManager. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] A unique identifier for this resource type. The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. instance_group (str): [Output Only] The URL of the Instance Group resource. + + This field is a member of `oneof`_ ``_instance_group``. instance_template (str): The URL of the instance template that is specified for this managed instance group. The @@ -20746,13 +23216,19 @@ class InstanceGroupManager(proto.Message): not change unless you run recreateInstances, run applyUpdatesToInstances, or set the group's updatePolicy.type to PROACTIVE. + + This field is a member of `oneof`_ ``_instance_template``. kind (str): [Output Only] The resource type, which is always compute#instanceGroupManager for managed instance groups. + + This field is a member of `oneof`_ ``_kind``. name (str): The name of the managed instance group. The name must be 1-63 characters long, and comply with RFC1035. + + This field is a member of `oneof`_ ``_name``. named_ports (Sequence[google.cloud.compute_v1.types.NamedPort]): Named ports configured for the Instance Groups complementary to this Instance Group @@ -20760,14 +23236,22 @@ class InstanceGroupManager(proto.Message): region (str): [Output Only] The URL of the region where the managed instance group resides (for regional resources). + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] The URL for this managed instance group. The server defines this URL. + + This field is a member of `oneof`_ ``_self_link``. stateful_policy (google.cloud.compute_v1.types.StatefulPolicy): Stateful configuration for this Instanced Group Manager + + This field is a member of `oneof`_ ``_stateful_policy``. status (google.cloud.compute_v1.types.InstanceGroupManagerStatus): [Output Only] The status of this managed instance group. + + This field is a member of `oneof`_ ``_status``. target_pools (Sequence[str]): The URLs for all TargetPool resources to which instances in the instanceGroup field are @@ -20780,9 +23264,13 @@ class InstanceGroupManager(proto.Message): number by using the instanceGroupManager deleteInstances or abandonInstances methods. Resizing the group also changes this number. 
+ + This field is a member of `oneof`_ ``_target_size``. update_policy (google.cloud.compute_v1.types.InstanceGroupManagerUpdatePolicy): The update policy for this managed instance group. + + This field is a member of `oneof`_ ``_update_policy``. versions (Sequence[google.cloud.compute_v1.types.InstanceGroupManagerVersion]): Specifies the instance templates used by this managed instance group to create instances. Each @@ -20798,6 +23286,8 @@ class InstanceGroupManager(proto.Message): zone (str): [Output Only] The URL of a zone where the managed instance group is located (for zonal resources). + + This field is a member of `oneof`_ ``_zone``. """ auto_healing_policies = proto.RepeatedField( @@ -20853,12 +23343,15 @@ class InstanceGroupManager(proto.Message): class InstanceGroupManagerActionsSummary(proto.Message): r""" + Attributes: abandoning (int): [Output Only] The total number of instances in the managed instance group that are scheduled to be abandoned. Abandoning an instance removes it from the managed instance group without deleting it. + + This field is a member of `oneof`_ ``_abandoning``. creating (int): [Output Only] The number of instances in the managed instance group that are scheduled to be created or are @@ -20867,20 +23360,28 @@ class InstanceGroupManagerActionsSummary(proto.Message): instance successfully. If you have disabled creation retries, this field will not be populated; instead, the creatingWithoutRetries field will be populated. + + This field is a member of `oneof`_ ``_creating``. creating_without_retries (int): [Output Only] The number of instances that the managed instance group will attempt to create. The group attempts to create each instance only once. If the group fails to create any of these instances, it decreases the group's targetSize value accordingly. + + This field is a member of `oneof`_ ``_creating_without_retries``. deleting (int): [Output Only] The number of instances in the managed instance group that are scheduled to be deleted or are currently being deleted. + + This field is a member of `oneof`_ ``_deleting``. none (int): [Output Only] The number of instances in the managed instance group that are running and have no scheduled actions. + + This field is a member of `oneof`_ ``_none``. recreating (int): [Output Only] The number of instances in the managed instance group that are scheduled to be recreated or are @@ -20888,20 +23389,28 @@ class InstanceGroupManagerActionsSummary(proto.Message): deletes the existing root persistent disk and creates a new disk from the image that is defined in the instance template. + + This field is a member of `oneof`_ ``_recreating``. refreshing (int): [Output Only] The number of instances in the managed instance group that are being reconfigured with properties that do not require a restart or a recreate action. For example, setting or removing target pools for the instance. + + This field is a member of `oneof`_ ``_refreshing``. restarting (int): [Output Only] The number of instances in the managed instance group that are scheduled to be restarted or are currently being restarted. + + This field is a member of `oneof`_ ``_restarting``. verifying (int): [Output Only] The number of instances in the managed instance group that are being verified. See the managedInstances[].currentAction property in the listManagedInstances method documentation. + + This field is a member of `oneof`_ ``_verifying``. 
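A hedged sketch of creating the managed instance group described above from an instance template; the group name, base instance name, template, project, and zone are placeholders:

```python
from google.cloud import compute_v1

mig = compute_v1.InstanceGroupManager(
    name="web-mig",                                    # placeholder
    base_instance_name="web",                          # instances are named web-<random suffix>
    instance_template="global/instanceTemplates/web-template",
    target_size=3,
)
operation = compute_v1.InstanceGroupManagersClient().insert(
    project="my-project",
    zone="us-central1-a",
    instance_group_manager_resource=mig,
)
```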
""" abandoning = proto.Field(proto.INT32, number=440023373, optional=True,) @@ -20919,10 +23428,13 @@ class InstanceGroupManagerActionsSummary(proto.Message): class InstanceGroupManagerAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceGroupManagerAggregatedList.ItemsEntry]): A list of InstanceGroupManagersScopedList resources. @@ -20930,6 +23442,8 @@ class InstanceGroupManagerAggregatedList(proto.Message): [Output Only] The resource type, which is always compute#instanceGroupManagerAggregatedList for an aggregated list of managed instance groups. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -20937,12 +23451,18 @@ class InstanceGroupManagerAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -20967,10 +23487,13 @@ def raw_page(self): class InstanceGroupManagerAutoHealingPolicy(proto.Message): r""" + Attributes: health_check (str): The URL for the health check that signals autohealing. + + This field is a member of `oneof`_ ``_health_check``. initial_delay_sec (int): The number of seconds that the managed instance group waits before it applies autohealing policies to new instances or @@ -20980,6 +23503,8 @@ class InstanceGroupManagerAutoHealingPolicy(proto.Message): prevents the managed instance group from recreating its instances prematurely. This value must be from range [0, 3600]. + + This field is a member of `oneof`_ ``_initial_delay_sec``. """ health_check = proto.Field(proto.STRING, number=308876645, optional=True,) @@ -20988,16 +23513,21 @@ class InstanceGroupManagerAutoHealingPolicy(proto.Message): class InstanceGroupManagerList(proto.Message): r"""[Output Only] A list of managed instance groups. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceGroupManager]): A list of InstanceGroupManager resources. kind (str): [Output Only] The resource type, which is always compute#instanceGroupManagerList for a list of managed instance groups. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -21005,10 +23535,16 @@ class InstanceGroupManagerList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -21029,10 +23565,13 @@ def raw_page(self): class InstanceGroupManagerStatus(proto.Message): r""" + Attributes: autoscaler (str): [Output Only] The URL of the Autoscaler that targets this instance group manager. + + This field is a member of `oneof`_ ``_autoscaler``. is_stable (bool): [Output Only] A bit indicating whether the managed instance group is in a stable state. A stable state means that: none @@ -21041,13 +23580,19 @@ class InstanceGroupManagerStatus(proto.Message): restart, or deletion); no future changes are scheduled for instances in the managed instance group; and the managed instance group itself is not being modified. + + This field is a member of `oneof`_ ``_is_stable``. stateful (google.cloud.compute_v1.types.InstanceGroupManagerStatusStateful): [Output Only] Stateful status of the given Instance Group Manager. + + This field is a member of `oneof`_ ``_stateful``. version_target (google.cloud.compute_v1.types.InstanceGroupManagerStatusVersionTarget): [Output Only] A status of consistency of Instances' versions with their target version specified by version field on Instance Group Manager. + + This field is a member of `oneof`_ ``_version_target``. """ autoscaler = proto.Field(proto.STRING, number=517258967, optional=True,) @@ -21068,6 +23613,7 @@ class InstanceGroupManagerStatus(proto.Message): class InstanceGroupManagerStatusStateful(proto.Message): r""" + Attributes: has_stateful_config (bool): [Output Only] A bit indicating whether the managed instance @@ -21077,9 +23623,13 @@ class InstanceGroupManagerStatusStateful(proto.Message): config even when there is still some preserved state on a managed instance, for example, if you have deleted all PICs but not yet applied those deletions. + + This field is a member of `oneof`_ ``_has_stateful_config``. per_instance_configs (google.cloud.compute_v1.types.InstanceGroupManagerStatusStatefulPerInstanceConfigs): [Output Only] Status of per-instance configs on the instance. + + This field is a member of `oneof`_ ``_per_instance_configs``. """ has_stateful_config = proto.Field(proto.BOOL, number=110474224, optional=True,) @@ -21093,12 +23643,15 @@ class InstanceGroupManagerStatusStateful(proto.Message): class InstanceGroupManagerStatusStatefulPerInstanceConfigs(proto.Message): r""" + Attributes: all_effective (bool): A bit indicating if all of the group's per- nstance configs (listed in the output of a listPerInstanceConfigs API call) have status EFFECTIVE or there are no per-instance-configs. + + This field is a member of `oneof`_ ``_all_effective``. """ all_effective = proto.Field(proto.BOOL, number=516540553, optional=True,) @@ -21106,6 +23659,7 @@ class InstanceGroupManagerStatusStatefulPerInstanceConfigs(proto.Message): class InstanceGroupManagerStatusVersionTarget(proto.Message): r""" + Attributes: is_reached (bool): [Output Only] A bit indicating whether version target has @@ -21113,6 +23667,8 @@ class InstanceGroupManagerStatusVersionTarget(proto.Message): instances are in their target version. Instances' target version are specified by version field on Instance Group Manager. + + This field is a member of `oneof`_ ``_is_reached``. 
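# --- Illustrative usage (not part of the generated diff) ---
# A small sketch, assuming the standard generated clients, that polls the
# InstanceGroupManagerStatus block described above until the group reports a
# stable state. Project, zone, and group names are placeholders.
import time

from google.cloud import compute_v1

igm_client = compute_v1.InstanceGroupManagersClient()
while True:
    igm = igm_client.get(project="my-project", zone="us-central1-a",
                         instance_group_manager="my-mig")
    status = igm.status  # InstanceGroupManagerStatus
    if status.is_stable:
        break
    time.sleep(10)
print("stable; version target reached:", status.version_target.is_reached)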
""" is_reached = proto.Field(proto.BOOL, number=433209149, optional=True,) @@ -21120,6 +23676,7 @@ class InstanceGroupManagerStatusVersionTarget(proto.Message): class InstanceGroupManagerUpdatePolicy(proto.Message): r""" + Attributes: instance_redistribution_type (str): The instance redistribution policy for @@ -21129,6 +23686,8 @@ class InstanceGroupManagerUpdatePolicy(proto.Message): across zones in the region. - NONE: For non- autoscaled groups, proactive redistribution is disabled. + + This field is a member of `oneof`_ ``_instance_redistribution_type``. max_surge (google.cloud.compute_v1.types.FixedOrPercent): The maximum number of instances that can be created above the specified targetSize during @@ -21142,6 +23701,8 @@ class InstanceGroupManagerUpdatePolicy(proto.Message): least one of either maxSurge or maxUnavailable must be greater than 0. Learn more about maxSurge. + + This field is a member of `oneof`_ ``_max_surge``. max_unavailable (google.cloud.compute_v1.types.FixedOrPercent): The maximum number of instances that can be unavailable during the update process. An @@ -21162,6 +23723,8 @@ class InstanceGroupManagerUpdatePolicy(proto.Message): managed instance group operates. At least one of either maxSurge or maxUnavailable must be greater than 0. Learn more about maxUnavailable. + + This field is a member of `oneof`_ ``_max_unavailable``. minimal_action (str): Minimal action to be taken on an instance. You can specify either RESTART to restart @@ -21172,9 +23735,13 @@ class InstanceGroupManagerUpdatePolicy(proto.Message): the Updater determines that the minimal action you specify is not enough to perform the update, it might perform a more disruptive action. + + This field is a member of `oneof`_ ``_minimal_action``. replacement_method (str): What action should be used to replace instances. See minimal_action.REPLACE + + This field is a member of `oneof`_ ``_replacement_method``. type_ (str): The type of update process. You can specify either PROACTIVE so that the instance group @@ -21184,6 +23751,8 @@ class InstanceGroupManagerUpdatePolicy(proto.Message): executed but the update will be performed as part of other actions (for example, resizes or recreateInstances calls). + + This field is a member of `oneof`_ ``_type``. """ instance_redistribution_type = proto.Field( @@ -21202,6 +23771,7 @@ class InstanceGroupManagerUpdatePolicy(proto.Message): class InstanceGroupManagerVersion(proto.Message): r""" + Attributes: instance_template (str): The URL of the instance template that is specified for this @@ -21213,10 +23783,14 @@ class InstanceGroupManagerVersion(proto.Message): the group's updatePolicy.type to PROACTIVE; in those cases, existing instances are updated until the ``targetSize`` for this version is reached. + + This field is a member of `oneof`_ ``_instance_template``. name (str): Name of the version. Unique among all versions in the scope of this managed instance group. + + This field is a member of `oneof`_ ``_name``. target_size (google.cloud.compute_v1.types.FixedOrPercent): Specifies the intended number of instances to be created from the instanceTemplate. The final number of instances @@ -21228,6 +23802,8 @@ class InstanceGroupManagerVersion(proto.Message): the number is rounded. If unset, this version will update any remaining instances not updated by another version. Read Starting a canary update for more information. + + This field is a member of `oneof`_ ``_target_size``. 
""" instance_template = proto.Field(proto.STRING, number=309248228, optional=True,) @@ -21239,6 +23815,7 @@ class InstanceGroupManagerVersion(proto.Message): class InstanceGroupManagersAbandonInstancesRequest(proto.Message): r""" + Attributes: instances (Sequence[str]): The URLs of one or more instances to abandon. This can be a @@ -21251,12 +23828,15 @@ class InstanceGroupManagersAbandonInstancesRequest(proto.Message): class InstanceGroupManagersApplyUpdatesRequest(proto.Message): r"""InstanceGroupManagers.applyUpdatesToInstances + Attributes: all_instances (bool): Flag to update all instances instead of specified list of ���instances���. If the flag is set to true then the instances may not be specified in the request. + + This field is a member of `oneof`_ ``_all_instances``. instances (Sequence[str]): The list of URLs of one or more instances for which you want to apply updates. Each URL can be a full URL or a partial @@ -21272,6 +23852,8 @@ class InstanceGroupManagersApplyUpdatesRequest(proto.Message): update requires a more disruptive action than you set with this flag, the necessary action is performed to execute the update. + + This field is a member of `oneof`_ ``_minimal_action``. most_disruptive_allowed_action (str): The most disruptive action that you want to perform on each instance during the update: - @@ -21283,6 +23865,8 @@ class InstanceGroupManagersApplyUpdatesRequest(proto.Message): REPLACE. If your update requires a more disruptive action than you set with this flag, the update request will fail. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. """ all_instances = proto.Field(proto.BOOL, number=403676512, optional=True,) @@ -21295,6 +23879,7 @@ class InstanceGroupManagersApplyUpdatesRequest(proto.Message): class InstanceGroupManagersCreateInstancesRequest(proto.Message): r"""InstanceGroupManagers.createInstances + Attributes: instances (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): [Required] List of specifications of per-instance configs. @@ -21307,6 +23892,7 @@ class InstanceGroupManagersCreateInstancesRequest(proto.Message): class InstanceGroupManagersDeleteInstancesRequest(proto.Message): r""" + Attributes: instances (Sequence[str]): The URLs of one or more instances to delete. This can be a @@ -21322,6 +23908,8 @@ class InstanceGroupManagersDeleteInstancesRequest(proto.Message): malformed instance URL or a reference to an instance that exists in a zone or region other than the group's zone or region. + + This field is a member of `oneof`_ ``_skip_instances_on_validation_error``. """ instances = proto.RepeatedField(proto.STRING, number=29097598,) @@ -21332,6 +23920,7 @@ class InstanceGroupManagersDeleteInstancesRequest(proto.Message): class InstanceGroupManagersDeletePerInstanceConfigsReq(proto.Message): r"""InstanceGroupManagers.deletePerInstanceConfigs + Attributes: names (Sequence[str]): The list of instance names for which we want @@ -21344,6 +23933,7 @@ class InstanceGroupManagersDeletePerInstanceConfigsReq(proto.Message): class InstanceGroupManagersListErrorsResponse(proto.Message): r""" + Attributes: items (Sequence[google.cloud.compute_v1.types.InstanceManagedByIgmError]): [Output Only] The list of errors of the managed instance @@ -21355,6 +23945,8 @@ class InstanceGroupManagersListErrorsResponse(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. """ @property @@ -21369,6 +23961,7 @@ def raw_page(self): class InstanceGroupManagersListManagedInstancesResponse(proto.Message): r""" + Attributes: managed_instances (Sequence[google.cloud.compute_v1.types.ManagedInstance]): [Output Only] The list of instances in the managed instance @@ -21380,6 +23973,8 @@ class InstanceGroupManagersListManagedInstancesResponse(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. """ @property @@ -21394,6 +23989,7 @@ def raw_page(self): class InstanceGroupManagersListPerInstanceConfigsResp(proto.Message): r""" + Attributes: items (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): [Output Only] The list of PerInstanceConfig. @@ -21404,8 +24000,12 @@ class InstanceGroupManagersListPerInstanceConfigsResp(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -21423,6 +24023,7 @@ def raw_page(self): class InstanceGroupManagersPatchPerInstanceConfigsReq(proto.Message): r"""InstanceGroupManagers.patchPerInstanceConfigs + Attributes: per_instance_configs (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): The list of per-instance configs to insert or @@ -21436,6 +24037,7 @@ class InstanceGroupManagersPatchPerInstanceConfigsReq(proto.Message): class InstanceGroupManagersRecreateInstancesRequest(proto.Message): r""" + Attributes: instances (Sequence[str]): The URLs of one or more instances to recreate. This can be a @@ -21448,6 +24050,7 @@ class InstanceGroupManagersRecreateInstancesRequest(proto.Message): class InstanceGroupManagersScopedList(proto.Message): r""" + Attributes: instance_group_managers (Sequence[google.cloud.compute_v1.types.InstanceGroupManager]): [Output Only] The list of managed instance groups that are @@ -21455,6 +24058,8 @@ class InstanceGroupManagersScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] The warning that replaces the list of managed instance groups when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ instance_group_managers = proto.RepeatedField( @@ -21467,6 +24072,7 @@ class InstanceGroupManagersScopedList(proto.Message): class InstanceGroupManagersSetInstanceTemplateRequest(proto.Message): r""" + Attributes: instance_template (str): The URL of the instance template that is @@ -21477,6 +24083,8 @@ class InstanceGroupManagersSetInstanceTemplateRequest(proto.Message): not change unless you run recreateInstances, run applyUpdatesToInstances, or set the group's updatePolicy.type to PROACTIVE. + + This field is a member of `oneof`_ ``_instance_template``. 
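# --- Illustrative usage (not part of the generated diff) ---
# A short sketch of walking the paged list responses described above. The
# pager returned by the generated client is assumed to follow nextPageToken
# automatically, so the loop sees every ManagedInstance without manual token
# handling. Resource names are placeholders.
from google.cloud import compute_v1

igm_client = compute_v1.InstanceGroupManagersClient()
for managed_instance in igm_client.list_managed_instances(
    project="my-project",
    zone="us-central1-a",
    instance_group_manager="my-mig",
):
    print(managed_instance.instance, managed_instance.current_action)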
""" instance_template = proto.Field(proto.STRING, number=309248228, optional=True,) @@ -21484,6 +24092,7 @@ class InstanceGroupManagersSetInstanceTemplateRequest(proto.Message): class InstanceGroupManagersSetTargetPoolsRequest(proto.Message): r""" + Attributes: fingerprint (str): The fingerprint of the target pools @@ -21495,6 +24104,8 @@ class InstanceGroupManagersSetTargetPoolsRequest(proto.Message): request to ensure that you do not overwrite changes that were applied from another concurrent request. + + This field is a member of `oneof`_ ``_fingerprint``. target_pools (Sequence[str]): The list of target pool URLs that instances in this managed instance group belong to. The @@ -21510,6 +24121,7 @@ class InstanceGroupManagersSetTargetPoolsRequest(proto.Message): class InstanceGroupManagersUpdatePerInstanceConfigsReq(proto.Message): r"""InstanceGroupManagers.updatePerInstanceConfigs + Attributes: per_instance_configs (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): The list of per-instance configs to insert or @@ -21523,6 +24135,7 @@ class InstanceGroupManagersUpdatePerInstanceConfigsReq(proto.Message): class InstanceGroupsAddInstancesRequest(proto.Message): r""" + Attributes: instances (Sequence[google.cloud.compute_v1.types.InstanceReference]): The list of instances to add to the instance @@ -21536,16 +24149,21 @@ class InstanceGroupsAddInstancesRequest(proto.Message): class InstanceGroupsListInstances(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceWithNamedPorts]): A list of InstanceWithNamedPorts resources. kind (str): [Output Only] The resource type, which is always compute#instanceGroupsListInstances for the list of instances in the specified instance group. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -21553,10 +24171,16 @@ class InstanceGroupsListInstances(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -21577,6 +24201,7 @@ def raw_page(self): class InstanceGroupsListInstancesRequest(proto.Message): r""" + Attributes: instance_state (google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest.InstanceState): A filter for the state of the instances in @@ -21584,6 +24209,8 @@ class InstanceGroupsListInstancesRequest(proto.Message): RUNNING. If you do not specify this parameter the list includes all instances regardless of their state. + + This field is a member of `oneof`_ ``_instance_state``. 
""" class InstanceState(proto.Enum): @@ -21603,6 +24230,7 @@ class InstanceState(proto.Enum): class InstanceGroupsRemoveInstancesRequest(proto.Message): r""" + Attributes: instances (Sequence[google.cloud.compute_v1.types.InstanceReference]): The list of instances to remove from the @@ -21616,6 +24244,7 @@ class InstanceGroupsRemoveInstancesRequest(proto.Message): class InstanceGroupsScopedList(proto.Message): r""" + Attributes: instance_groups (Sequence[google.cloud.compute_v1.types.InstanceGroup]): [Output Only] The list of instance groups that are contained @@ -21623,6 +24252,8 @@ class InstanceGroupsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] An informational warning that replaces the list of instance groups when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ instance_groups = proto.RepeatedField( @@ -21635,6 +24266,7 @@ class InstanceGroupsScopedList(proto.Message): class InstanceGroupsSetNamedPortsRequest(proto.Message): r""" + Attributes: fingerprint (str): The fingerprint of the named ports @@ -21648,6 +24280,8 @@ class InstanceGroupsSetNamedPortsRequest(proto.Message): another concurrent request. A request with an incorrect fingerprint will fail with error 412 conditionNotMet. + + This field is a member of `oneof`_ ``_fingerprint``. named_ports (Sequence[google.cloud.compute_v1.types.NamedPort]): The list of named ports to set for this instance group. @@ -21661,15 +24295,20 @@ class InstanceGroupsSetNamedPortsRequest(proto.Message): class InstanceList(proto.Message): r"""Contains a list of instances. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Instance]): A list of Instance resources. kind (str): [Output Only] Type of resource. Always compute#instanceList for lists of Instance resources. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -21677,10 +24316,16 @@ class InstanceList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -21699,16 +24344,21 @@ def raw_page(self): class InstanceListReferrers(proto.Message): r"""Contains a list of instance referrers. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Reference]): A list of Reference resources. kind (str): [Output Only] Type of resource. Always compute#instanceListReferrers for lists of Instance referrers. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -21716,10 +24366,16 @@ class InstanceListReferrers(proto.Message): the query parameter pageToken in the next list request. 
Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -21738,16 +24394,23 @@ def raw_page(self): class InstanceManagedByIgmError(proto.Message): r""" + Attributes: error (google.cloud.compute_v1.types.InstanceManagedByIgmErrorManagedInstanceError): [Output Only] Contents of the error. + + This field is a member of `oneof`_ ``_error``. instance_action_details (google.cloud.compute_v1.types.InstanceManagedByIgmErrorInstanceActionDetails): [Output Only] Details of the instance action that triggered this error. May be null, if the error was not caused by an action on an instance. This field is optional. + + This field is a member of `oneof`_ ``_instance_action_details``. timestamp (str): [Output Only] The time that this error occurred. This value is in RFC3339 text format. + + This field is a member of `oneof`_ ``_timestamp``. """ error = proto.Field( @@ -21767,20 +24430,27 @@ class InstanceManagedByIgmError(proto.Message): class InstanceManagedByIgmErrorInstanceActionDetails(proto.Message): r""" + Attributes: action (google.cloud.compute_v1.types.InstanceManagedByIgmErrorInstanceActionDetails.Action): [Output Only] Action that managed instance group was executing on the instance when the error occurred. Possible values: + + This field is a member of `oneof`_ ``_action``. instance (str): [Output Only] The URL of the instance. The URL can be set even if the instance has not yet been created. + + This field is a member of `oneof`_ ``_instance``. version (google.cloud.compute_v1.types.ManagedInstanceVersion): [Output Only] Version this instance was created from, or was being created from, but the creation failed. Corresponds to one of the versions that were set on the Instance Group Manager resource at the time this instance was being created. + + This field is a member of `oneof`_ ``_version``. """ class Action(proto.Enum): @@ -21810,11 +24480,16 @@ class Action(proto.Enum): class InstanceManagedByIgmErrorManagedInstanceError(proto.Message): r""" + Attributes: code (str): [Output Only] Error code. + + This field is a member of `oneof`_ ``_code``. message (str): [Output Only] Error message. + + This field is a member of `oneof`_ ``_message``. """ code = proto.Field(proto.STRING, number=3059181, optional=True,) @@ -21823,6 +24498,7 @@ class InstanceManagedByIgmErrorManagedInstanceError(proto.Message): class InstanceMoveRequest(proto.Message): r""" + Attributes: destination_zone (str): The URL of the destination zone to move the @@ -21831,6 +24507,8 @@ class InstanceMoveRequest(proto.Message): zone: - https://www.googleapis.com/compute/v1/projects/project/zones/zone - projects/project/zones/zone - zones/zone + + This field is a member of `oneof`_ ``_destination_zone``. target_instance (str): The URL of the target instance to move. This can be a full or partial URL. For example, the @@ -21839,6 +24517,8 @@ class InstanceMoveRequest(proto.Message): /instances/instance - projects/project/zones/zone/instances/instance - zones/zone/instances/instance + + This field is a member of `oneof`_ ``_target_instance``. 
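# --- Illustrative usage (not part of the generated diff) ---
# A sketch of reading the per-instance error records documented above from a
# managed instance group. Resource names are placeholders; each item yielded
# by the pager is an InstanceManagedByIgmError.
from google.cloud import compute_v1

igm_client = compute_v1.InstanceGroupManagersClient()
for entry in igm_client.list_errors(
    project="my-project",
    zone="us-central1-a",
    instance_group_manager="my-mig",
):
    print(entry.timestamp,
          entry.instance_action_details.action,
          entry.error.code,
          entry.error.message)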
""" destination_zone = proto.Field(proto.STRING, number=131854653, optional=True,) @@ -21847,10 +24527,13 @@ class InstanceMoveRequest(proto.Message): class InstanceProperties(proto.Message): r""" + Attributes: advanced_machine_features (google.cloud.compute_v1.types.AdvancedMachineFeatures): Controls for advanced machine-related behavior features. + + This field is a member of `oneof`_ ``_advanced_machine_features``. can_ip_forward (bool): Enables instances created based on these properties to send packets with source IP @@ -21861,12 +24544,18 @@ class InstanceProperties(proto.Message): a Route resource, specify true. If unsure, leave this set to false. See the Enable IP forwarding documentation for more information. + + This field is a member of `oneof`_ ``_can_ip_forward``. confidential_instance_config (google.cloud.compute_v1.types.ConfidentialInstanceConfig): Specifies the Confidential Instance options. + + This field is a member of `oneof`_ ``_confidential_instance_config``. description (str): An optional text description for the instances that are created from these properties. + + This field is a member of `oneof`_ ``_description``. disks (Sequence[google.cloud.compute_v1.types.AttachedDisk]): An array of disks that are associated with the instances that are created from these @@ -21881,12 +24570,16 @@ class InstanceProperties(proto.Message): machine_type (str): The machine type to use for instances that are created from these properties. + + This field is a member of `oneof`_ ``_machine_type``. metadata (google.cloud.compute_v1.types.Metadata): The metadata key/value pairs to assign to instances that are created from these properties. These pairs can consist of custom metadata or predefined keys. See Project and instance metadata for more information. + + This field is a member of `oneof`_ ``_metadata``. min_cpu_platform (str): Minimum cpu/platform to be used by instances. The instance may be scheduled on the specified @@ -21896,15 +24589,21 @@ class InstanceProperties(proto.Message): minCpuPlatform: "Intel Sandy Bridge". For more information, read Specifying a Minimum CPU Platform. + + This field is a member of `oneof`_ ``_min_cpu_platform``. network_interfaces (Sequence[google.cloud.compute_v1.types.NetworkInterface]): An array of network access configurations for this interface. private_ipv6_google_access (google.cloud.compute_v1.types.InstanceProperties.PrivateIpv6GoogleAccess): The private IPv6 google access type for VMs. If not specified, use INHERIT_FROM_SUBNETWORK as default. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. reservation_affinity (google.cloud.compute_v1.types.ReservationAffinity): Specifies the reservations that instances can consume from. + + This field is a member of `oneof`_ ``_reservation_affinity``. resource_policies (Sequence[str]): Resource policies (names, not ULRs) applied to instances created from these properties. @@ -21912,6 +24611,8 @@ class InstanceProperties(proto.Message): Specifies the scheduling options for the instances that are created from these properties. + + This field is a member of `oneof`_ ``_scheduling``. service_accounts (Sequence[google.cloud.compute_v1.types.ServiceAccount]): A list of service accounts with specified scopes. Access tokens for these service accounts @@ -21920,6 +24621,7 @@ class InstanceProperties(proto.Message): obtain the access tokens for these instances. 
shielded_instance_config (google.cloud.compute_v1.types.ShieldedInstanceConfig): + This field is a member of `oneof`_ ``_shielded_instance_config``. tags (google.cloud.compute_v1.types.Tags): A list of tags to apply to the instances that are created from these properties. The tags @@ -21927,6 +24629,8 @@ class InstanceProperties(proto.Message): firewalls. The setTags method can modify this list of tags. Each tag within the list must comply with RFC1035. + + This field is a member of `oneof`_ ``_tags``. """ class PrivateIpv6GoogleAccess(proto.Enum): @@ -21986,10 +24690,13 @@ class PrivateIpv6GoogleAccess(proto.Enum): class InstanceReference(proto.Message): r""" + Attributes: instance (str): The URL for a specific instance. @required compute.instancegroups.addInstances/removeInstances + + This field is a member of `oneof`_ ``_instance``. """ instance = proto.Field(proto.STRING, number=18257045, optional=True,) @@ -22004,16 +24711,24 @@ class InstanceTemplate(proto.Message): creation_timestamp (str): [Output Only] The creation timestamp for this instance template in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] A unique identifier for this instance template. The server defines this identifier. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] The resource type, which is always compute#instanceTemplate for instance templates. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, @@ -22023,12 +24738,18 @@ class InstanceTemplate(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. properties (google.cloud.compute_v1.types.InstanceProperties): The instance properties for this instance template. + + This field is a member of `oneof`_ ``_properties``. self_link (str): [Output Only] The URL for this instance template. The server defines this URL. + + This field is a member of `oneof`_ ``_self_link``. source_instance (str): The source instance used to create the template. You can provide this as a partial or @@ -22037,9 +24758,13 @@ class InstanceTemplate(proto.Message): https://www.googleapis.com/compute/v1/projects/project/zones/zone /instances/instance - projects/project/zones/zone/instances/instance + + This field is a member of `oneof`_ ``_source_instance``. source_instance_params (google.cloud.compute_v1.types.SourceInstanceParams): The source instance params to use to create this instance template. + + This field is a member of `oneof`_ ``_source_instance_params``. """ creation_timestamp = proto.Field(proto.STRING, number=30525366, optional=True,) @@ -22059,16 +24784,21 @@ class InstanceTemplate(proto.Message): class InstanceTemplateList(proto.Message): r"""A list of instance templates. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceTemplate]): A list of InstanceTemplate resources. 
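# --- Illustrative usage (not part of the generated diff) ---
# A minimal sketch of building an InstanceTemplate from the InstanceProperties
# fields documented above and inserting it. The machine type, image, and
# network values are placeholders, and AttachedDisk /
# AttachedDiskInitializeParams / NetworkInterface are assumed to be the usual
# compute_v1 types for those sub-messages.
from google.cloud import compute_v1

template = compute_v1.InstanceTemplate(
    name="web-template-v1",
    properties=compute_v1.InstanceProperties(
        machine_type="e2-standard-2",
        tags=compute_v1.Tags(items=["http-server"]),
        disks=[
            compute_v1.AttachedDisk(
                boot=True,
                auto_delete=True,
                initialize_params=compute_v1.AttachedDiskInitializeParams(
                    source_image=(
                        "projects/debian-cloud/global/images/family/debian-11"
                    ),
                ),
            )
        ],
        network_interfaces=[
            compute_v1.NetworkInterface(network="global/networks/default")
        ],
    ),
)
templates_client = compute_v1.InstanceTemplatesClient()
operation = templates_client.insert(
    project="my-project", instance_template_resource=template
)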
kind (str): [Output Only] The resource type, which is always compute#instanceTemplatesListResponse for instance template lists. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -22076,10 +24806,16 @@ class InstanceTemplateList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -22100,14 +24836,19 @@ def raw_page(self): class InstanceWithNamedPorts(proto.Message): r""" + Attributes: instance (str): [Output Only] The URL of the instance. + + This field is a member of `oneof`_ ``_instance``. named_ports (Sequence[google.cloud.compute_v1.types.NamedPort]): [Output Only] The named ports that belong to this instance group. status (google.cloud.compute_v1.types.InstanceWithNamedPorts.Status): [Output Only] The status of the instance. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -22133,6 +24874,7 @@ class Status(proto.Enum): class InstancesAddResourcePoliciesRequest(proto.Message): r""" + Attributes: resource_policies (Sequence[str]): Resource policies to be added to this @@ -22144,6 +24886,7 @@ class InstancesAddResourcePoliciesRequest(proto.Message): class InstancesGetEffectiveFirewallsResponse(proto.Message): r""" + Attributes: firewall_policys (Sequence[google.cloud.compute_v1.types.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): Effective firewalls from firewall policies. @@ -22163,18 +24906,27 @@ class InstancesGetEffectiveFirewallsResponse(proto.Message): class InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Message): r""" + Attributes: display_name (str): [Output Only] Deprecated, please use short name instead. The display name of the firewall policy. + + This field is a member of `oneof`_ ``_display_name``. name (str): [Output Only] The name of the firewall policy. + + This field is a member of `oneof`_ ``_name``. rules (Sequence[google.cloud.compute_v1.types.FirewallPolicyRule]): The rules that apply to the network. short_name (str): [Output Only] The short name of the firewall policy. + + This field is a member of `oneof`_ ``_short_name``. type_ (google.cloud.compute_v1.types.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy.Type): [Output Only] The type of the firewall policy. + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -22194,6 +24946,7 @@ class Type(proto.Enum): class InstancesRemoveResourcePoliciesRequest(proto.Message): r""" + Attributes: resource_policies (Sequence[str]): Resource policies to be removed from this @@ -22205,12 +24958,15 @@ class InstancesRemoveResourcePoliciesRequest(proto.Message): class InstancesScopedList(proto.Message): r""" + Attributes: instances (Sequence[google.cloud.compute_v1.types.Instance]): [Output Only] A list of instances contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of instances when the list is empty. 
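# --- Illustrative usage (not part of the generated diff) ---
# A sketch of fetching the effective firewalls for one network interface of an
# instance, as described by the response messages above. "nic0" is the usual
# name of the first interface; the other names are placeholders.
from google.cloud import compute_v1

instances_client = compute_v1.InstancesClient()
response = instances_client.get_effective_firewalls(
    project="my-project",
    zone="us-central1-a",
    instance="my-instance",
    network_interface="nic0",
)
for policy in response.firewall_policys:
    print("firewall policy:", policy.short_name, policy.type_, len(policy.rules))
for firewall in response.firewalls:
    print("VPC firewall:", firewall.name)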
+ + This field is a member of `oneof`_ ``_warning``. """ instances = proto.RepeatedField(proto.MESSAGE, number=29097598, message="Instance",) @@ -22221,12 +24977,15 @@ class InstancesScopedList(proto.Message): class InstancesSetLabelsRequest(proto.Message): r""" + Attributes: label_fingerprint (str): Fingerprint of the previous set of labels for this resource, used to prevent conflicts. Provide the latest fingerprint value when making a request to add or change labels. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.InstancesSetLabelsRequest.LabelsEntry]): """ @@ -22237,6 +24996,7 @@ class InstancesSetLabelsRequest(proto.Message): class InstancesSetMachineResourcesRequest(proto.Message): r""" + Attributes: guest_accelerators (Sequence[google.cloud.compute_v1.types.AcceleratorConfig]): A list of the type and count of accelerator @@ -22250,12 +25010,15 @@ class InstancesSetMachineResourcesRequest(proto.Message): class InstancesSetMachineTypeRequest(proto.Message): r""" + Attributes: machine_type (str): Full or partial URL of the machine type resource. See Machine Types for a full list of machine types. For example: zones/us- central1-f/machineTypes/n1-standard-1 + + This field is a member of `oneof`_ ``_machine_type``. """ machine_type = proto.Field(proto.STRING, number=227711026, optional=True,) @@ -22263,10 +25026,13 @@ class InstancesSetMachineTypeRequest(proto.Message): class InstancesSetMinCpuPlatformRequest(proto.Message): r""" + Attributes: min_cpu_platform (str): Minimum cpu/platform this instance should be started at. + + This field is a member of `oneof`_ ``_min_cpu_platform``. """ min_cpu_platform = proto.Field(proto.STRING, number=242912759, optional=True,) @@ -22274,9 +25040,12 @@ class InstancesSetMinCpuPlatformRequest(proto.Message): class InstancesSetServiceAccountRequest(proto.Message): r""" + Attributes: email (str): Email address of the service account. + + This field is a member of `oneof`_ ``_email``. scopes (Sequence[str]): The list of scopes to be made available for this service account. @@ -22288,6 +25057,7 @@ class InstancesSetServiceAccountRequest(proto.Message): class InstancesStartWithEncryptionKeyRequest(proto.Message): r""" + Attributes: disks (Sequence[google.cloud.compute_v1.types.CustomerEncryptionKeyProtectedDisk]): Array of disks associated with this instance @@ -22312,9 +25082,13 @@ class Int64RangeMatch(proto.Message): range_end (int): The end of the range (exclusive) in signed long integer format. + + This field is a member of `oneof`_ ``_range_end``. range_start (int): The start of the range (inclusive) in signed long integer format. + + This field is a member of `oneof`_ ``_range_start``. """ range_end = proto.Field(proto.INT64, number=322439897, optional=True,) @@ -22335,32 +25109,46 @@ class Interconnect(proto.Message): false, no packets can be carried over the interconnect and no BGP routes are exchanged over it. By default, the status is set to true. + + This field is a member of `oneof`_ ``_admin_enabled``. circuit_infos (Sequence[google.cloud.compute_v1.types.InterconnectCircuitInfo]): [Output Only] A list of CircuitInfo objects, that describe the individual circuits in this LAG. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. customer_name (str): Customer name, to put in the Letter of Authorization as the party authorized to request a crossconnect. 
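# --- Illustrative usage (not part of the generated diff) ---
# A sketch of the fingerprint-guarded label update described above: read the
# instance, reuse its current label fingerprint, and send back the merged
# labels. Resource names are placeholders, and the flattened
# `instances_set_labels_request_resource` keyword is assumed to follow the
# generated naming pattern.
from google.cloud import compute_v1

instances_client = compute_v1.InstancesClient()
instance = instances_client.get(project="my-project", zone="us-central1-a",
                                instance="my-instance")
labels = dict(instance.labels)
labels["env"] = "staging"
operation = instances_client.set_labels(
    project="my-project",
    zone="us-central1-a",
    instance="my-instance",
    instances_set_labels_request_resource=compute_v1.InstancesSetLabelsRequest(
        label_fingerprint=instance.label_fingerprint,
        labels=labels,
    ),
)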
+ + This field is a member of `oneof`_ ``_customer_name``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. expected_outages (Sequence[google.cloud.compute_v1.types.InterconnectOutageNotification]): [Output Only] A list of outages expected for this Interconnect. google_ip_address (str): [Output Only] IP address configured on the Google side of the Interconnect link. This can be used only for ping tests. + + This field is a member of `oneof`_ ``_google_ip_address``. google_reference_id (str): [Output Only] Google reference ID to be used when raising support tickets with Google or otherwise to debug backend connectivity issues. + + This field is a member of `oneof`_ ``_google_reference_id``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. interconnect_attachments (Sequence[str]): [Output Only] A list of the URLs of all InterconnectAttachments configured to use this Interconnect. @@ -22370,19 +25158,27 @@ class Interconnect(proto.Message): between customers though a partner. - DEDICATED: A dedicated physical interconnection with the customer. Note that a value IT_PRIVATE has been deprecated in favor of DEDICATED. + + This field is a member of `oneof`_ ``_interconnect_type``. kind (str): [Output Only] Type of the resource. Always compute#interconnect for interconnects. + + This field is a member of `oneof`_ ``_kind``. link_type (google.cloud.compute_v1.types.Interconnect.LinkType): Type of link requested, which can take one of the following values: - LINK_TYPE_ETHERNET_10G_LR: A 10G Ethernet with LR optics - LINK_TYPE_ETHERNET_100G_LR: A 100G Ethernet with LR optics. Note that this field indicates the speed of each of the links in the bundle, not the speed of the entire bundle. + + This field is a member of `oneof`_ ``_link_type``. location (str): URL of the InterconnectLocation object that represents where this connection is to be provisioned. + + This field is a member of `oneof`_ ``_location``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -22392,6 +25188,8 @@ class Interconnect(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. noc_contact_email (str): Email address to contact the customer NOC for operations and maintenance notifications @@ -22399,6 +25197,8 @@ class Interconnect(proto.Message): will be used for notifications in addition to all other forms described, such as Stackdriver logs alerting and Cloud Notifications. + + This field is a member of `oneof`_ ``_noc_contact_email``. operational_status (google.cloud.compute_v1.types.Interconnect.OperationalStatus): [Output Only] The current status of this Interconnect's functionality, which can take one of the following values: - @@ -22409,23 +25209,35 @@ class Interconnect(proto.Message): this Interconnect. - OS_UNDER_MAINTENANCE: An Interconnect that is undergoing internal maintenance. No attachments may be provisioned or updated on this Interconnect. + + This field is a member of `oneof`_ ``_operational_status``. peer_ip_address (str): [Output Only] IP address configured on the customer side of the Interconnect link. 
The customer should configure this IP address during turnup when prompted by Google NOC. This can be used only for ping tests. + + This field is a member of `oneof`_ ``_peer_ip_address``. provisioned_link_count (int): [Output Only] Number of links actually provisioned in this interconnect. + + This field is a member of `oneof`_ ``_provisioned_link_count``. requested_link_count (int): Target number of physical links in the link bundle, as requested by the customer. + + This field is a member of `oneof`_ ``_requested_link_count``. satisfies_pzs (bool): [Output Only] Set to true if the resource satisfies the zone separation organization policy constraints and false otherwise. Defaults to false if the field is not present. + + This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. state (google.cloud.compute_v1.types.Interconnect.State): [Output Only] The current state of Interconnect functionality, which can take one of the following values: - @@ -22436,6 +25248,8 @@ class Interconnect(proto.Message): UNDER_MAINTENANCE: The Interconnect is undergoing internal maintenance. No attachments may be provisioned or updated on this Interconnect. + + This field is a member of `oneof`_ ``_state``. """ class InterconnectType(proto.Enum): @@ -22534,6 +25348,8 @@ class InterconnectAttachment(proto.Message): admin_enabled (bool): Determines whether this Attachment will carry packets. Not present for PARTNER_PROVIDER. + + This field is a member of `oneof`_ ``_admin_enabled``. bandwidth (google.cloud.compute_v1.types.InterconnectAttachment.Bandwidth): Provisioned bandwidth capacity for the interconnect attachment. For attachments of type DEDICATED, the user can @@ -22546,6 +25362,8 @@ class InterconnectAttachment(proto.Message): BPS_400M: 400 Mbit/s - BPS_500M: 500 Mbit/s - BPS_1G: 1 Gbit/s - BPS_2G: 2 Gbit/s - BPS_5G: 5 Gbit/s - BPS_10G: 10 Gbit/s - BPS_20G: 20 Gbit/s - BPS_50G: 50 Gbit/s + + This field is a member of `oneof`_ ``_bandwidth``. candidate_subnets (Sequence[str]): Up to 16 candidate prefixes that can be used to restrict the allocation of @@ -22562,17 +25380,27 @@ class InterconnectAttachment(proto.Message): cloud_router_ip_address (str): [Output Only] IPv4 address + prefix length to be configured on Cloud Router Interface for this interconnect attachment. + + This field is a member of `oneof`_ ``_cloud_router_ip_address``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. customer_router_ip_address (str): [Output Only] IPv4 address + prefix length to be configured on the customer router subinterface for this interconnect attachment. + + This field is a member of `oneof`_ ``_customer_router_ip_address``. dataplane_version (int): [Output Only] Dataplane version for this InterconnectAttachment. + + This field is a member of `oneof`_ ``_dataplane_version``. description (str): An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. edge_availability_domain (google.cloud.compute_v1.types.InterconnectAttachment.EdgeAvailabilityDomain): Desired availability domain for the attachment. Only available for type PARTNER, at creation time, and can take @@ -22584,6 +25412,8 @@ class InterconnectAttachment(proto.Message): pairing key, so that the provisioned circuit will lie in the specified domain. 
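# --- Illustrative usage (not part of the generated diff) ---
# A sketch of requesting a Dedicated Interconnect using the fields documented
# above. Every value is a placeholder, the flattened `interconnect_resource`
# keyword is assumed to follow the generated naming pattern, and a real order
# also involves the LOA-CFA process outside of this API call.
from google.cloud import compute_v1

interconnects_client = compute_v1.InterconnectsClient()
interconnect = compute_v1.Interconnect(
    name="my-interconnect",
    interconnect_type="DEDICATED",
    link_type="LINK_TYPE_ETHERNET_10G_LR",
    requested_link_count=1,
    location="global/interconnectLocations/iad-zone1-1",  # placeholder URL
    customer_name="Example Corp",
    noc_contact_email="noc@example.com",
    admin_enabled=True,
)
operation = interconnects_client.insert(
    project="my-project", interconnect_resource=interconnect
)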
If not specified, the value will default to AVAILABILITY_DOMAIN_ANY. + + This field is a member of `oneof`_ ``_edge_availability_domain``. encryption (google.cloud.compute_v1.types.InterconnectAttachment.Encryption): Indicates the user-supplied encryption option of this VLAN attachment (interconnectAttachment). Can only be specified @@ -22598,17 +25428,25 @@ class InterconnectAttachment(proto.Message): from, such a VLAN attachment. To use *IPsec-encrypted Cloud Interconnect*, the VLAN attachment must be created with this option. Not currently available publicly. + + This field is a member of `oneof`_ ``_encryption``. google_reference_id (str): [Output Only] Google reference ID, to be used when raising support tickets with Google or otherwise to debug backend connectivity issues. [Deprecated] This field is not used. + + This field is a member of `oneof`_ ``_google_reference_id``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. interconnect (str): URL of the underlying Interconnect object that this attachment's traffic will traverse through. + + This field is a member of `oneof`_ ``_interconnect``. ipsec_internal_addresses (Sequence[str]): A list of URLs of addresses that have been reserved for the VLAN attachment. Used only for @@ -22633,11 +25471,15 @@ class InterconnectAttachment(proto.Message): kind (str): [Output Only] Type of the resource. Always compute#interconnectAttachment for interconnect attachments. + + This field is a member of `oneof`_ ``_kind``. mtu (int): Maximum Transmission Unit (MTU), in bytes, of packets passing through this interconnect attachment. Only 1440 and 1500 are allowed. If not specified, the value will default to 1440. + + This field is a member of `oneof`_ ``_mtu``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -22647,6 +25489,8 @@ class InterconnectAttachment(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. operational_status (google.cloud.compute_v1.types.InterconnectAttachment.OperationalStatus): [Output Only] The current status of whether or not this interconnect attachment is functional, which can take one of @@ -22654,31 +25498,43 @@ class InterconnectAttachment(proto.Message): turned up and is ready to use. - OS_UNPROVISIONED: The attachment is not ready to use yet, because turnup is not complete. + + This field is a member of `oneof`_ ``_operational_status``. pairing_key (str): [Output only for type PARTNER. Input only for PARTNER_PROVIDER. Not present for DEDICATED]. The opaque identifier of an PARTNER attachment used to initiate provisioning with a selected partner. Of the form "XXXXX/region/domain". + + This field is a member of `oneof`_ ``_pairing_key``. partner_asn (int): Optional BGP ASN for the router supplied by a Layer 3 Partner if they configured BGP on behalf of the customer. Output only for PARTNER type, input only for PARTNER_PROVIDER, not available for DEDICATED. + + This field is a member of `oneof`_ ``_partner_asn``. partner_metadata (google.cloud.compute_v1.types.InterconnectAttachmentPartnerMetadata): Informational metadata about Partner attachments from Partners to display to customers. 
Output only for for PARTNER type, mutable for PARTNER_PROVIDER, not available for DEDICATED. + + This field is a member of `oneof`_ ``_partner_metadata``. private_interconnect_info (google.cloud.compute_v1.types.InterconnectAttachmentPrivateInfo): [Output Only] Information specific to an InterconnectAttachment. This property is populated if the interconnect that this is attached to is of type DEDICATED. + + This field is a member of `oneof`_ ``_private_interconnect_info``. region (str): [Output Only] URL of the region where the regional interconnect attachment resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. router (str): URL of the Cloud Router to be used for dynamic routing. This router must be in the same @@ -22686,12 +25542,18 @@ class InterconnectAttachment(proto.Message): InterconnectAttachment will automatically connect the Interconnect to the network & region within which the Cloud Router is configured. + + This field is a member of `oneof`_ ``_router``. satisfies_pzs (bool): [Output Only] Set to true if the resource satisfies the zone separation organization policy constraints and false otherwise. Defaults to false if the field is not present. + + This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. state (google.cloud.compute_v1.types.InterconnectAttachment.State): [Output Only] The current state of this attachment's functionality. Enum values ACTIVE and UNPROVISIONED are @@ -22713,6 +25575,8 @@ class InterconnectAttachment(proto.Message): longer functional. This could be because the associated Interconnect was removed, or because the other side of a Partner attachment was deleted. + + This field is a member of `oneof`_ ``_state``. type_ (google.cloud.compute_v1.types.InterconnectAttachment.Type): The type of interconnect attachment this is, which can take one of the following values: - DEDICATED: an attachment to a @@ -22720,10 +25584,14 @@ class InterconnectAttachment(proto.Message): Partner Interconnect, created by the customer. - PARTNER_PROVIDER: an attachment to a Partner Interconnect, created by the partner. + + This field is a member of `oneof`_ ``_type``. vlan_tag8021q (int): The IEEE 802.1Q VLAN tag for this attachment, in the range 2-4094. Only specified at creation time. + + This field is a member of `oneof`_ ``_vlan_tag8021q``. """ class Bandwidth(proto.Enum): @@ -22893,10 +25761,13 @@ class Type(proto.Enum): class InterconnectAttachmentAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InterconnectAttachmentAggregatedList.ItemsEntry]): A list of InterconnectAttachmentsScopedList resources. @@ -22904,6 +25775,8 @@ class InterconnectAttachmentAggregatedList(proto.Message): [Output Only] Type of resource. Always compute#interconnectAttachmentAggregatedList for aggregated lists of interconnect attachments. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -22911,12 +25784,18 @@ class InterconnectAttachmentAggregatedList(proto.Message): the query parameter pageToken in the next list request. 
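# --- Illustrative usage (not part of the generated diff) ---
# A sketch of creating a PARTNER VLAN attachment and reading back the pairing
# key that is handed to the partner, using the fields documented above. All
# names are placeholders, and the flattened
# `interconnect_attachment_resource` keyword is assumed to follow the
# generated naming pattern.
from google.cloud import compute_v1

attachments_client = compute_v1.InterconnectAttachmentsClient()
attachment = compute_v1.InterconnectAttachment(
    name="my-partner-attachment",
    type_="PARTNER",
    edge_availability_domain="AVAILABILITY_DOMAIN_1",
    router="projects/my-project/regions/us-central1/routers/my-router",
    admin_enabled=True,
)
operation = attachments_client.insert(
    project="my-project",
    region="us-central1",
    interconnect_attachment_resource=attachment,
)
# Once the insert has completed, the pairing key is available on the resource.
created = attachments_client.get(
    project="my-project",
    region="us-central1",
    interconnect_attachment="my-partner-attachment",
)
print("give this to the partner:", created.pairing_key)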
Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -22947,12 +25826,16 @@ class InterconnectAttachmentList(proto.Message): id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InterconnectAttachment]): A list of InterconnectAttachment resources. kind (str): [Output Only] Type of resource. Always compute#interconnectAttachmentList for lists of interconnect attachments. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -22960,10 +25843,16 @@ class InterconnectAttachmentList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -22995,16 +25884,22 @@ class InterconnectAttachmentPartnerMetadata(proto.Message): Partner's portal. For instance "Chicago 1". This value may be validated to match approved Partner values. + + This field is a member of `oneof`_ ``_interconnect_name``. partner_name (str): Plain text name of the Partner providing this attachment. This value may be validated to match approved Partner values. + + This field is a member of `oneof`_ ``_partner_name``. portal_url (str): URL of the Partner's portal for this Attachment. Partners may customise this to be a deep link to the specific resource on the Partner portal. This value may be validated to match approved Partner values. + + This field is a member of `oneof`_ ``_portal_url``. """ interconnect_name = proto.Field(proto.STRING, number=514963356, optional=True,) @@ -23021,6 +25916,8 @@ class InterconnectAttachmentPrivateInfo(proto.Message): [Output Only] 802.1q encapsulation tag to be used for traffic between Google and the customer, going to and from this network and region. + + This field is a member of `oneof`_ ``_tag8021q``. """ tag8021q = proto.Field(proto.UINT32, number=271820992, optional=True,) @@ -23028,6 +25925,7 @@ class InterconnectAttachmentPrivateInfo(proto.Message): class InterconnectAttachmentsScopedList(proto.Message): r""" + Attributes: interconnect_attachments (Sequence[google.cloud.compute_v1.types.InterconnectAttachment]): A list of interconnect attachments contained @@ -23035,6 +25933,8 @@ class InterconnectAttachmentsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
""" interconnect_attachments = proto.RepeatedField( @@ -23053,13 +25953,19 @@ class InterconnectCircuitInfo(proto.Message): Attributes: customer_demarc_id (str): Customer-side demarc ID for this circuit. + + This field is a member of `oneof`_ ``_customer_demarc_id``. google_circuit_id (str): Google-assigned unique ID for this circuit. Assigned at circuit turn-up. + + This field is a member of `oneof`_ ``_google_circuit_id``. google_demarc_id (str): Google-side demarc ID for this circuit. Assigned at circuit turn-up and provided by Google to the customer in the LOA. + + This field is a member of `oneof`_ ``_google_demarc_id``. """ customer_demarc_id = proto.Field(proto.STRING, number=28771859, optional=True,) @@ -23086,6 +25992,8 @@ class InterconnectDiagnostics(proto.Message): mac_address (str): The MAC address of the Interconnect's bundle interface. + + This field is a member of `oneof`_ ``_mac_address``. """ arp_caches = proto.RepeatedField( @@ -23099,11 +26007,16 @@ class InterconnectDiagnostics(proto.Message): class InterconnectDiagnosticsARPEntry(proto.Message): r"""Describing the ARP neighbor entries seen on this link + Attributes: ip_address (str): The IP address of this ARP neighbor. + + This field is a member of `oneof`_ ``_ip_address``. mac_address (str): The MAC address of this ARP neighbor. + + This field is a member of `oneof`_ ``_mac_address``. """ ip_address = proto.Field(proto.STRING, number=406272220, optional=True,) @@ -23112,13 +26025,18 @@ class InterconnectDiagnosticsARPEntry(proto.Message): class InterconnectDiagnosticsLinkLACPStatus(proto.Message): r""" + Attributes: google_system_id (str): System ID of the port on Google's side of the LACP exchange. + + This field is a member of `oneof`_ ``_google_system_id``. neighbor_system_id (str): System ID of the port on the neighbor's side of the LACP exchange. + + This field is a member of `oneof`_ ``_neighbor_system_id``. state (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkLACPStatus.State): The state of a LACP link, which can take one of the following values: - ACTIVE: The link is @@ -23126,6 +26044,8 @@ class InterconnectDiagnosticsLinkLACPStatus(proto.Message): DETACHED: The link is not configured within the bundle. This means that the rest of the object should be empty. + + This field is a member of `oneof`_ ``_state``. """ class State(proto.Enum): @@ -23145,6 +26065,7 @@ class State(proto.Enum): class InterconnectDiagnosticsLinkOpticalPower(proto.Message): r""" + Attributes: state (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkOpticalPower.State): The status of the current value when compared to the warning @@ -23156,6 +26077,8 @@ class InterconnectDiagnosticsLinkOpticalPower(proto.Message): LOW_ALARM: The value has crossed below the low alarm threshold. - HIGH_ALARM: The value has crossed above the high alarm threshold. + + This field is a member of `oneof`_ ``_state``. value (float): Value of the current receiving or transmitting optical power, read in dBm. Take a @@ -23163,6 +26086,8 @@ class InterconnectDiagnosticsLinkOpticalPower(proto.Message): and trigger warnings relative to that value. In general, a -7dBm warning and a -11dBm alarm are good optical value estimates for most links. + + This field is a member of `oneof`_ ``_value``. 
""" class State(proto.Enum): @@ -23188,6 +26113,7 @@ class State(proto.Enum): class InterconnectDiagnosticsLinkStatus(proto.Message): r""" + Attributes: arp_caches (Sequence[google.cloud.compute_v1.types.InterconnectDiagnosticsARPEntry]): A list of InterconnectDiagnostics.ARPEntry @@ -23197,19 +26123,28 @@ class InterconnectDiagnosticsLinkStatus(proto.Message): circuit_id (str): The unique ID for this link assigned during turn up by Google. + + This field is a member of `oneof`_ ``_circuit_id``. google_demarc (str): The Demarc address assigned by Google and provided in the LoA. + + This field is a member of `oneof`_ ``_google_demarc``. lacp_status (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkLACPStatus): + This field is a member of `oneof`_ ``_lacp_status``. receiving_optical_power (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkOpticalPower): An InterconnectDiagnostics.LinkOpticalPower object, describing the current value and status of the received light level. + + This field is a member of `oneof`_ ``_receiving_optical_power``. transmitting_optical_power (google.cloud.compute_v1.types.InterconnectDiagnosticsLinkOpticalPower): An InterconnectDiagnostics.LinkOpticalPower object, describing the current value and status of the transmitted light level. + + This field is a member of `oneof`_ ``_transmitting_optical_power``. """ arp_caches = proto.RepeatedField( @@ -23245,11 +26180,15 @@ class InterconnectList(proto.Message): id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Interconnect]): A list of Interconnect resources. kind (str): [Output Only] Type of resource. Always compute#interconnectList for lists of interconnects. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -23257,10 +26196,16 @@ class InterconnectList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -23290,40 +26235,64 @@ class InterconnectLocation(proto.Message): [Output Only] The postal address of the Point of Presence, each line in the address is separated by a newline character. + + This field is a member of `oneof`_ ``_address``. availability_zone (str): [Output Only] Availability zone for this InterconnectLocation. Within a metropolitan area (metro), maintenance will not be simultaneously scheduled in more than one availability zone. Example: "zone1" or "zone2". + + This field is a member of `oneof`_ ``_availability_zone``. city (str): [Output Only] Metropolitan area designator that indicates which city an interconnect is located. For example: "Chicago, IL", "Amsterdam, Netherlands". + + This field is a member of `oneof`_ ``_city``. 
continent (google.cloud.compute_v1.types.InterconnectLocation.Continent): [Output Only] Continent for this location, which can take one of the following values: - AFRICA - ASIA_PAC - EUROPE - NORTH_AMERICA - SOUTH_AMERICA + + This field is a member of `oneof`_ ``_continent``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): [Output Only] An optional description of the resource. + + This field is a member of `oneof`_ ``_description``. facility_provider (str): [Output Only] The name of the provider for this facility (e.g., EQUINIX). + + This field is a member of `oneof`_ ``_facility_provider``. facility_provider_facility_id (str): [Output Only] A provider-assigned Identifier for this facility (e.g., Ashburn-DC1). + + This field is a member of `oneof`_ ``_facility_provider_facility_id``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#interconnectLocation for interconnect locations. + + This field is a member of `oneof`_ ``_kind``. name (str): [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. peeringdb_facility_id (str): [Output Only] The peeringdb identifier for this facility (corresponding with a netfac type in peeringdb). + + This field is a member of `oneof`_ ``_peeringdb_facility_id``. region_infos (Sequence[google.cloud.compute_v1.types.InterconnectLocationRegionInfo]): [Output Only] A list of InterconnectLocation.RegionInfo objects, that describe parameters pertaining to the relation @@ -23331,6 +26300,8 @@ class InterconnectLocation(proto.Message): regions. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. status (google.cloud.compute_v1.types.InterconnectLocation.Status): [Output Only] The status of this InterconnectLocation, which can take one of the following values: - CLOSED: The @@ -23338,10 +26309,14 @@ class InterconnectLocation(proto.Message): provisioning new Interconnects. - AVAILABLE: The InterconnectLocation is available for provisioning new Interconnects. + + This field is a member of `oneof`_ ``_status``. supports_pzs (bool): [Output Only] Set to true for locations that support physical zone separation. Defaults to false if the field is not present. + + This field is a member of `oneof`_ ``_supports_pzs``. """ class Continent(proto.Enum): @@ -23404,12 +26379,16 @@ class InterconnectLocationList(proto.Message): id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InterconnectLocation]): A list of InterconnectLocation resources. kind (str): [Output Only] Type of resource. Always compute#interconnectLocationList for lists of interconnect locations. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -23417,10 +26396,16 @@ class InterconnectLocationList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
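As the next_page_token descriptions note, list responses are paged. A small sketch, assuming default application credentials and a hypothetical project id, of the pager the generated client returns versus inspecting each page directly:

from google.cloud import compute_v1

client = compute_v1.InterconnectLocationsClient()

# The pager follows nextPageToken transparently and yields InterconnectLocation items.
for location in client.list(project="my-project"):
    print(location.name, location.city)

# Each underlying InterconnectLocationList response can also be inspected per page.
for page in client.list(project="my-project").pages:
    print("token for the next request:", page.next_page_token)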
self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -23449,11 +26434,17 @@ class InterconnectLocationRegionInfo(proto.Message): Expected round-trip time in milliseconds, from this InterconnectLocation to a VM in this region. + + This field is a member of `oneof`_ ``_expected_rtt_ms``. location_presence (google.cloud.compute_v1.types.InterconnectLocationRegionInfo.LocationPresence): Identifies the network presence of this location. + + This field is a member of `oneof`_ ``_location_presence``. region (str): URL for the region of this location. + + This field is a member of `oneof`_ ``_region``. """ class LocationPresence(proto.Enum): @@ -23473,6 +26464,7 @@ class LocationPresence(proto.Enum): class InterconnectOutageNotification(proto.Message): r"""Description of a planned outage on this Interconnect. + Attributes: affected_circuits (Sequence[str]): If issue_type is IT_PARTIAL_OUTAGE, a list of the @@ -23480,9 +26472,13 @@ class InterconnectOutageNotification(proto.Message): description (str): A description about the purpose of the outage. + + This field is a member of `oneof`_ ``_description``. end_time (int): Scheduled end time for the outage (milliseconds since Unix epoch). + + This field is a member of `oneof`_ ``_end_time``. issue_type (google.cloud.compute_v1.types.InterconnectOutageNotification.IssueType): Form this outage is expected to take, which can take one of the following values: - OUTAGE: The Interconnect may be @@ -23490,19 +26486,27 @@ class InterconnectOutageNotification(proto.Message): window. - PARTIAL_OUTAGE: Some circuits comprising the Interconnect as a whole should remain up, but with reduced bandwidth. Note that the versions of this enum prefixed with - "IT_" have been deprecated in favor of the unprefixed + `IT_` have been deprecated in favor of the unprefixed values. + + This field is a member of `oneof`_ ``_issue_type``. name (str): Unique identifier for this outage notification. + + This field is a member of `oneof`_ ``_name``. source (google.cloud.compute_v1.types.InterconnectOutageNotification.Source): The party that generated this notification, which can take the following value: - GOOGLE: this notification as generated by Google. Note that the value of NSRC_GOOGLE has been deprecated in favor of GOOGLE. + + This field is a member of `oneof`_ ``_source``. start_time (int): Scheduled start time for the outage (milliseconds since Unix epoch). + + This field is a member of `oneof`_ ``_start_time``. state (google.cloud.compute_v1.types.InterconnectOutageNotification.State): State of this notification, which can take one of the following values: - ACTIVE: This outage notification is @@ -23511,8 +26515,10 @@ class InterconnectOutageNotification(proto.Message): outage associated with this notification was cancelled before the outage was due to start. - COMPLETED: The outage associated with this notification is complete. Note that the - versions of this enum prefixed with "NS_" have been + versions of this enum prefixed with `NS_` have been deprecated in favor of the unprefixed values. + + This field is a member of `oneof`_ ``_state``. """ class IssueType(proto.Enum): @@ -23521,7 +26527,7 @@ class IssueType(proto.Enum): of service for some or all of the specified window. 
- PARTIAL_OUTAGE: Some circuits comprising the Interconnect as a whole should remain up, but with reduced bandwidth. Note that the versions - of this enum prefixed with "IT_" have been deprecated in favor of + of this enum prefixed with `IT_` have been deprecated in favor of the unprefixed values. """ UNDEFINED_ISSUE_TYPE = 0 @@ -23547,7 +26553,7 @@ class State(proto.Enum): end_time for scheduling. - CANCELLED: The outage associated with this notification was cancelled before the outage was due to start. - COMPLETED: The outage associated with this notification is - complete. Note that the versions of this enum prefixed with "NS_" + complete. Note that the versions of this enum prefixed with `NS_` have been deprecated in favor of the unprefixed values. """ UNDEFINED_STATE = 0 @@ -23571,9 +26577,11 @@ class State(proto.Enum): class InterconnectsGetDiagnosticsResponse(proto.Message): r"""Response for the InterconnectsGetDiagnosticsRequest. + Attributes: result (google.cloud.compute_v1.types.InterconnectDiagnostics): + This field is a member of `oneof`_ ``_result``. """ result = proto.Field( @@ -23609,6 +26617,8 @@ class InvalidateCacheUrlMapRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. url_map (str): Name of the UrlMap scoping this request. """ @@ -23623,6 +26633,7 @@ class InvalidateCacheUrlMapRequest(proto.Message): class Items(proto.Message): r"""Metadata + Attributes: key (str): Key for the metadata entry. Keys must conform to the @@ -23630,6 +26641,8 @@ class Items(proto.Message): in length. This is reflected as part of a URL in the metadata server. Additionally, to avoid ambiguity, keys must not conflict with any other metadata keys for the project. + + This field is a member of `oneof`_ ``_key``. value (str): Value for the metadata entry. These are free- orm strings, and only have meaning as @@ -23637,6 +26650,8 @@ class Items(proto.Message): instance. The only restriction placed on values is that their size must be less than or equal to 262144 bytes (256 KiB). + + This field is a member of `oneof`_ ``_value``. """ key = proto.Field(proto.STRING, number=106079, optional=True,) @@ -23653,33 +26668,52 @@ class License(proto.Message): charges_use_fee (bool): [Output Only] Deprecated. This field no longer reflects whether a license charges a usage fee. + + This field is a member of `oneof`_ ``_charges_use_fee``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional textual description of the resource; provided by the client when the resource is created. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#license for licenses. + + This field is a member of `oneof`_ ``_kind``. license_code (int): [Output Only] The unique code used to attach this license to images, snapshots, and disks. + + This field is a member of `oneof`_ ``_license_code``. name (str): Name of the resource. The name must be 1-63 characters long and comply with RFC1035. + + This field is a member of `oneof`_ ``_name``. 
resource_requirements (google.cloud.compute_v1.types.LicenseResourceRequirements): + This field is a member of `oneof`_ ``_resource_requirements``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. transferable (bool): If false, licenses will not be copied from the source resource when creating an image from a disk, disk from snapshot, or snapshot from disk. + + This field is a member of `oneof`_ ``_transferable``. """ charges_use_fee = proto.Field(proto.BOOL, number=372412622, optional=True,) @@ -23708,28 +26742,44 @@ class LicenseCode(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): [Output Only] Description of this License Code. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#licenseCode for licenses. + + This field is a member of `oneof`_ ``_kind``. license_alias (Sequence[google.cloud.compute_v1.types.LicenseCodeLicenseAlias]): [Output Only] URL and description aliases of Licenses with the same License Code. name (str): [Output Only] Name of the resource. The name is 1-20 characters long and must be a valid 64 bit integer. + + This field is a member of `oneof`_ ``_name``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. state (google.cloud.compute_v1.types.LicenseCode.State): [Output Only] Current state of this License Code. + + This field is a member of `oneof`_ ``_state``. transferable (bool): [Output Only] If true, the license will remain attached when creating images or snapshots from disks. Otherwise, the license is not transferred. + + This field is a member of `oneof`_ ``_transferable``. """ class State(proto.Enum): @@ -23756,12 +26806,17 @@ class State(proto.Enum): class LicenseCodeLicenseAlias(proto.Message): r""" + Attributes: description (str): [Output Only] Description of this License Code. + + This field is a member of `oneof`_ ``_description``. self_link (str): [Output Only] URL of license corresponding to this License Code. + + This field is a member of `oneof`_ ``_self_link``. """ description = proto.Field(proto.STRING, number=422937596, optional=True,) @@ -23770,14 +26825,21 @@ class LicenseCodeLicenseAlias(proto.Message): class LicenseResourceCommitment(proto.Message): r"""Commitment for a particular license resource. + Attributes: amount (int): The number of licenses purchased. + + This field is a member of `oneof`_ ``_amount``. cores_per_license (str): Specifies the core range of the instance for which this license applies. + + This field is a member of `oneof`_ ``_cores_per_license``. license_ (str): Any applicable license URI. + + This field is a member of `oneof`_ ``_license``. """ amount = proto.Field(proto.INT64, number=196759640, optional=True,) @@ -23787,15 +26849,20 @@ class LicenseResourceCommitment(proto.Message): class LicenseResourceRequirements(proto.Message): r""" + Attributes: min_guest_cpu_count (int): Minimum number of guest cpus required to use the Instance. Enforced at Instance creation and Instance start. + + This field is a member of `oneof`_ ``_min_guest_cpu_count``. min_memory_mb (int): Minimum memory required to use the Instance. 
Enforced at Instance creation and Instance start. + + This field is a member of `oneof`_ ``_min_memory_mb``. """ min_guest_cpu_count = proto.Field(proto.INT32, number=477964836, optional=True,) @@ -23804,10 +26871,13 @@ class LicenseResourceRequirements(proto.Message): class LicensesListResponse(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.License]): A list of License resources. next_page_token (str): @@ -23817,10 +26887,16 @@ class LicensesListResponse(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -23862,6 +26938,8 @@ class ListAcceleratorTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -23869,6 +26947,8 @@ class ListAcceleratorTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -23880,16 +26960,22 @@ class ListAcceleratorTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -23929,6 +27015,8 @@ class ListAddressesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -23936,6 +27024,8 @@ class ListAddressesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. 
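The filter and max_results semantics described above apply to every List*Request in this file. A minimal sketch for accelerator types, with hypothetical project and zone values:

from google.cloud import compute_v1

request = compute_v1.ListAcceleratorTypesRequest(
    project="my-project",
    zone="us-central1-a",
    filter="name != nvidia-tesla-k80",  # exclude one type, per the filter syntax above
    max_results=100,                    # page size; the pager still yields all results
)

client = compute_v1.AcceleratorTypesClient()
for accelerator_type in client.list(request=request):
    print(accelerator_type.name)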
order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -23947,10 +27037,14 @@ class ListAddressesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -23959,6 +27053,8 @@ class ListAddressesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -23978,6 +27074,8 @@ class ListAssociationsFirewallPolicyRequest(proto.Message): target_resource (str): The target resource to list associations. It is an organization, or a folder. + + This field is a member of `oneof`_ ``_target_resource``. """ target_resource = proto.Field(proto.STRING, number=467318524, optional=True,) @@ -24009,6 +27107,8 @@ class ListAutoscalersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24016,6 +27116,8 @@ class ListAutoscalersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24027,16 +27129,22 @@ class ListAutoscalersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): Name of the zone for this request. """ @@ -24076,6 +27184,8 @@ class ListAvailableFeaturesSslPoliciesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. 
If the number of available results is larger than @@ -24083,6 +27193,8 @@ class ListAvailableFeaturesSslPoliciesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24094,16 +27206,22 @@ class ListAvailableFeaturesSslPoliciesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24140,6 +27258,8 @@ class ListBackendBucketsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24147,6 +27267,8 @@ class ListBackendBucketsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24158,16 +27280,22 @@ class ListBackendBucketsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24204,6 +27332,8 @@ class ListBackendServicesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. 
If the number of available results is larger than @@ -24211,6 +27341,8 @@ class ListBackendServicesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24222,16 +27354,22 @@ class ListBackendServicesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24268,6 +27406,8 @@ class ListDiskTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24275,6 +27415,8 @@ class ListDiskTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24286,16 +27428,22 @@ class ListDiskTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -24335,6 +27483,8 @@ class ListDisksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24342,6 +27492,8 @@ class ListDisksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. 
Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24353,16 +27505,22 @@ class ListDisksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -24402,86 +27560,251 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - instance_group_manager (str): - The name of the managed instance group. It must be a string - that meets the requirements in RFC1035, or an unsigned long - integer: must match regexp pattern: - (?:`a-z `__?)|1-9{0,19}. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - zone (str): - The name of the zone where the managed - instance group is located. It should conform to - RFC1035. 
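The ListErrors request described above maps to a list_errors() call on the generated managed instance group client. A hedged sketch with hypothetical identifiers:

from google.cloud import compute_v1

client = compute_v1.InstanceGroupManagersClient()

# Returns a pager over the error entries recorded for the managed instance group.
for error in client.list_errors(
    project="my-project",
    zone="us-central1-a",
    instance_group_manager="my-mig",
):
    print(error)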
- """ - - filter = proto.Field(proto.STRING, number=336120696, optional=True,) - instance_group_manager = proto.Field(proto.STRING, number=249363395,) - max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) - order_by = proto.Field(proto.STRING, number=160562920, optional=True,) - page_token = proto.Field(proto.STRING, number=19994697, optional=True,) - project = proto.Field(proto.STRING, number=227560217,) - return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) - zone = proto.Field(proto.STRING, number=3744684,) - - -class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): - r"""A request message for RegionInstanceGroupManagers.ListErrors. - See the method description for details. - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. The expression must specify the field name, a - comparison operator, and the value that you want to use for - filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either ``=``, - ``!=``, ``>``, or ``<``. For example, if you are filtering - Compute Engine instances, you can exclude instances named - ``example-instance`` by specifying - ``name != example-instance``. You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - instance_group_manager (str): - The name of the managed instance group. It must be a string - that meets the requirements in RFC1035, or an unsigned long - integer: must match regexp pattern: - (?:`a-z `__?)|1-9{0,19}. + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It must be a string + that meets the requirements in RFC1035, or an unsigned long + integer: must match regexp pattern: + (?:`a-z `__?)|1-9{0,19}. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. 
Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where the managed + instance group is located. It should conform to + RFC1035. + """ + + filter = proto.Field(proto.STRING, number=336120696, optional=True,) + instance_group_manager = proto.Field(proto.STRING, number=249363395,) + max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) + order_by = proto.Field(proto.STRING, number=160562920, optional=True,) + page_token = proto.Field(proto.STRING, number=19994697, optional=True,) + project = proto.Field(proto.STRING, number=227560217,) + return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) + zone = proto.Field(proto.STRING, number=3744684,) + + +class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.ListErrors. + See the method description for details. + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either ``=``, + ``!=``, ``>``, or ``<``. For example, if you are filtering + Compute Engine instances, you can exclude instances named + ``example-instance`` by specifying + ``name != example-instance``. You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It must be a string + that meets the requirements in RFC1035, or an unsigned long + integer: must match regexp pattern: + (?:`a-z `__?)|1-9{0,19}. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. 
This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. This + should conform to RFC1035. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter = proto.Field(proto.STRING, number=336120696, optional=True,) + instance_group_manager = proto.Field(proto.STRING, number=249363395,) + max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) + order_by = proto.Field(proto.STRING, number=160562920, optional=True,) + page_token = proto.Field(proto.STRING, number=19994697, optional=True,) + project = proto.Field(proto.STRING, number=227560217,) + region = proto.Field(proto.STRING, number=138946292,) + return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) + + +class ListExternalVpnGatewaysRequest(proto.Message): + r"""A request message for ExternalVpnGateways.List. See the + method description for details. + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either ``=``, + ``!=``, ``>``, or ``<``. For example, if you are filtering + Compute Engine instances, you can exclude instances named + ``example-instance`` by specifying + ``name != example-instance``. You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter = proto.Field(proto.STRING, number=336120696, optional=True,) + max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) + order_by = proto.Field(proto.STRING, number=160562920, optional=True,) + page_token = proto.Field(proto.STRING, number=19994697, optional=True,) + project = proto.Field(proto.STRING, number=227560217,) + return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) + + +class ListFirewallPoliciesRequest(proto.Message): + r"""A request message for FirewallPolicies.List. See the method + description for details. + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either ``=``, + ``!=``, ``>``, or ``<``. For example, if you are filtering + Compute Engine instances, you can exclude instances named + ``example-instance`` by specifying + ``name != example-instance``. You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24489,75 +27812,8 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. 
This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - project (str): - Project ID for this request. - region (str): - Name of the region scoping this request. This - should conform to RFC1035. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - """ - - filter = proto.Field(proto.STRING, number=336120696, optional=True,) - instance_group_manager = proto.Field(proto.STRING, number=249363395,) - max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) - order_by = proto.Field(proto.STRING, number=160562920, optional=True,) - page_token = proto.Field(proto.STRING, number=19994697, optional=True,) - project = proto.Field(proto.STRING, number=227560217,) - region = proto.Field(proto.STRING, number=138946292,) - return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) - -class ListExternalVpnGatewaysRequest(proto.Message): - r"""A request message for ExternalVpnGateways.List. See the - method description for details. - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. The expression must specify the field name, a - comparison operator, and the value that you want to use for - filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either ``=``, - ``!=``, ``>``, or ``<``. For example, if you are filtering - Compute Engine instances, you can exclude instances named - ``example-instance`` by specifying - ``name != example-instance``. You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24569,80 +27825,24 @@ class ListExternalVpnGatewaysRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. - page_token (str): - Specifies a page token to use. 
Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - """ - filter = proto.Field(proto.STRING, number=336120696, optional=True,) - max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) - order_by = proto.Field(proto.STRING, number=160562920, optional=True,) - page_token = proto.Field(proto.STRING, number=19994697, optional=True,) - project = proto.Field(proto.STRING, number=227560217,) - return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) - - -class ListFirewallPoliciesRequest(proto.Message): - r"""A request message for FirewallPolicies.List. See the method - description for details. - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. The expression must specify the field name, a - comparison operator, and the value that you want to use for - filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either ``=``, - ``!=``, ``>``, or ``<``. For example, if you are filtering - Compute Engine instances, you can exclude instances named - ``example-instance`` by specifying - ``name != example-instance``. You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. parent_id (str): Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24679,6 +27879,8 @@ class ListFirewallsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24686,6 +27888,8 @@ class ListFirewallsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24697,16 +27901,22 @@ class ListFirewallsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24743,6 +27953,8 @@ class ListForwardingRulesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24750,6 +27962,8 @@ class ListForwardingRulesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24761,10 +27975,14 @@ class ListForwardingRulesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -24773,6 +27991,8 @@ class ListForwardingRulesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
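Unlike the project-scoped requests in this file, the FirewallPolicies.List request described above is scoped by parent_id (an organization or folder). A sketch with a hypothetical folder id:

from google.cloud import compute_v1

client = compute_v1.FirewallPoliciesClient()
request = compute_v1.ListFirewallPoliciesRequest(parent_id="folders/123456789012")

for policy in client.list(request=request):
    print(policy.name)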
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24810,6 +28030,8 @@ class ListGlobalAddressesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24817,6 +28039,8 @@ class ListGlobalAddressesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24828,16 +28052,22 @@ class ListGlobalAddressesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24874,6 +28104,8 @@ class ListGlobalForwardingRulesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24881,6 +28113,8 @@ class ListGlobalForwardingRulesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24892,16 +28126,22 @@ class ListGlobalForwardingRulesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -24938,6 +28178,8 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -24945,6 +28187,8 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -24956,16 +28200,22 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25002,6 +28252,8 @@ class ListGlobalOperationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25009,6 +28261,8 @@ class ListGlobalOperationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25020,16 +28274,22 @@ class ListGlobalOperationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25066,6 +28326,8 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25073,6 +28335,8 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25084,16 +28348,24 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. parent_id (str): Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25130,6 +28402,8 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25137,6 +28411,8 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25148,16 +28424,22 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25194,6 +28476,8 @@ class ListHealthChecksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25201,6 +28485,8 @@ class ListHealthChecksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25212,16 +28498,22 @@ class ListHealthChecksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25258,6 +28550,8 @@ class ListImagesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25265,6 +28559,8 @@ class ListImagesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25276,16 +28572,22 @@ class ListImagesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25322,6 +28624,8 @@ class ListInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25329,6 +28633,8 @@ class ListInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25340,16 +28646,22 @@ class ListInstanceGroupManagersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone where the managed instance group is located. @@ -25390,6 +28702,8 @@ class ListInstanceGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25397,6 +28711,8 @@ class ListInstanceGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25408,16 +28724,22 @@ class ListInstanceGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone where the instance group is located. 
@@ -25458,6 +28780,8 @@ class ListInstanceTemplatesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25465,6 +28789,8 @@ class ListInstanceTemplatesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25476,16 +28802,22 @@ class ListInstanceTemplatesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25522,6 +28854,8 @@ class ListInstancesInstanceGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. instance_group (str): The name of the instance group from which you want to generate a list of included instances. @@ -25534,6 +28868,8 @@ class ListInstancesInstanceGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25545,16 +28881,22 @@ class ListInstancesInstanceGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone where the instance group is located. 
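ListInstancesInstanceGroupsRequest also carries a small request-body resource (not shown in this hunk) that selects which instance states to return; a sketch, assuming InstanceGroupsClient, the instance_groups_list_instances_request_resource field, and placeholder names:

    from google.cloud import compute_v1

    client = compute_v1.InstanceGroupsClient()
    request = compute_v1.ListInstancesInstanceGroupsRequest(
        project="my-project",           # placeholder
        zone="us-central1-a",           # placeholder zone
        instance_group="my-group",      # placeholder instance group name
        # Body resource selecting RUNNING (vs ALL) instances; assumed field name.
        instance_groups_list_instances_request_resource=compute_v1.InstanceGroupsListInstancesRequest(
            instance_state="RUNNING",
        ),
    )
    for entry in client.list_instances(request=request):
        print(entry.instance, entry.status)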
@@ -25599,6 +28941,8 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. instance_group (str): Name of the regional instance group for which we want to list the instances. @@ -25609,6 +28953,8 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25620,10 +28966,14 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -25634,6 +28984,8 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25677,6 +29029,8 @@ class ListInstancesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25684,6 +29038,8 @@ class ListInstancesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25695,16 +29051,22 @@ class ListInstancesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. 
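A sketch of the implicit-AND filter form shown above, applied to a zonal instance listing (InstancesClient assumed, placeholder names):

    from google.cloud import compute_v1

    client = compute_v1.InstancesClient()
    request = compute_v1.ListInstancesRequest(
        project="my-project",       # placeholder
        zone="us-central1-a",       # placeholder zone
        # Two parenthesised expressions combine with an implicit AND.
        filter='(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")',
    )
    for instance in client.list(request=request):
        print(instance.name, instance.status)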
""" @@ -25744,6 +29106,8 @@ class ListInterconnectAttachmentsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25751,6 +29115,8 @@ class ListInterconnectAttachmentsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25762,10 +29128,14 @@ class ListInterconnectAttachmentsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -25774,6 +29144,8 @@ class ListInterconnectAttachmentsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25811,6 +29183,8 @@ class ListInterconnectLocationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25818,6 +29192,8 @@ class ListInterconnectLocationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25829,16 +29205,22 @@ class ListInterconnectLocationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25875,6 +29257,8 @@ class ListInterconnectsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25882,6 +29266,8 @@ class ListInterconnectsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25893,16 +29279,22 @@ class ListInterconnectsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -25939,6 +29331,8 @@ class ListLicensesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -25946,6 +29340,8 @@ class ListLicensesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -25957,16 +29353,22 @@ class ListLicensesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -26003,6 +29405,8 @@ class ListMachineTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26010,6 +29414,8 @@ class ListMachineTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26021,16 +29427,22 @@ class ListMachineTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -26071,6 +29483,8 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. instance_group_manager (str): The name of the managed instance group. max_results (int): @@ -26080,6 +29494,8 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26091,16 +29507,22 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone where the managed instance group is located. 
@@ -26143,6 +29565,8 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. instance_group_manager (str): The name of the managed instance group. max_results (int): @@ -26152,6 +29576,8 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26163,10 +29589,14 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -26175,6 +29605,8 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -26213,6 +29645,8 @@ class ListNetworkEndpointGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26220,6 +29654,8 @@ class ListNetworkEndpointGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26231,16 +29667,22 @@ class ListNetworkEndpointGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone where the network endpoint group is located. 
It should comply with @@ -26283,6 +29725,8 @@ class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26290,6 +29734,8 @@ class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. network_endpoint_group (str): The name of the network endpoint group from which you want to generate a list of included @@ -26306,16 +29752,22 @@ class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -26354,6 +29806,8 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26361,6 +29815,8 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. network_endpoint_group (str): The name of the network endpoint group from which you want to generate a list of included @@ -26379,16 +29835,22 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone where the network endpoint group is located. 
It should comply with @@ -26436,6 +29898,8 @@ class ListNetworksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26443,6 +29907,8 @@ class ListNetworksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26454,16 +29920,22 @@ class ListNetworksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -26500,6 +29972,8 @@ class ListNodeGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26507,6 +29981,8 @@ class ListNodeGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26518,16 +29994,22 @@ class ListNodeGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -26567,6 +30049,8 @@ class ListNodeTemplatesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. 
For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26574,6 +30058,8 @@ class ListNodeTemplatesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26585,10 +30071,14 @@ class ListNodeTemplatesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -26597,6 +30087,8 @@ class ListNodeTemplatesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -26634,6 +30126,8 @@ class ListNodeTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26641,6 +30135,8 @@ class ListNodeTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26652,16 +30148,22 @@ class ListNodeTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -26701,6 +30203,8 @@ class ListNodesNodeGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. 
For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26708,6 +30212,8 @@ class ListNodesNodeGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. node_group (str): Name of the NodeGroup resource whose nodes you want to list. @@ -26722,16 +30228,22 @@ class ListNodesNodeGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -26772,6 +30284,8 @@ class ListPacketMirroringsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26779,6 +30293,8 @@ class ListPacketMirroringsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26790,10 +30306,14 @@ class ListPacketMirroringsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -26802,6 +30322,8 @@ class ListPacketMirroringsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -26820,6 +30342,8 @@ class ListPeeringRoutesNetworksRequest(proto.Message): Attributes: direction (google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest.Direction): The direction of the exchanged routes. + + This field is a member of `oneof`_ ``_direction``. 
filter (str): A filter expression that filters resources listed in the response. The expression must specify the field name, a @@ -26841,6 +30365,8 @@ class ListPeeringRoutesNetworksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -26848,6 +30374,8 @@ class ListPeeringRoutesNetworksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. network (str): Name of the network for this request. order_by (str): @@ -26861,23 +30389,33 @@ class ListPeeringRoutesNetworksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. peering_name (str): The response will show routes exchanged over the given peering connection. + + This field is a member of `oneof`_ ``_peering_name``. project (str): Project ID for this request. region (str): The region of the request. The response will include all subnet routes, static routes and dynamic routes in the region. + + This field is a member of `oneof`_ ``_region``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ class Direction(proto.Enum): @@ -26927,6 +30465,8 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. instance_group_manager (str): The name of the managed instance group. It should conform to RFC1035. @@ -26937,6 +30477,8 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -26948,16 +30490,22 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone where the managed instance group is located. It should conform to @@ -27001,6 +30549,8 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. instance_group_manager (str): The name of the managed instance group. It should conform to RFC1035. @@ -27011,6 +30561,8 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27022,10 +30574,14 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -27035,6 +30591,8 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27074,6 +30632,8 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27081,6 +30641,8 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27092,16 +30654,22 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. 
+ + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27138,6 +30706,8 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27145,6 +30715,8 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27156,16 +30728,22 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27202,6 +30780,8 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27209,6 +30789,8 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27220,10 +30802,14 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
region (str): @@ -27232,6 +30818,8 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27269,6 +30857,8 @@ class ListReferrersInstancesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. instance (str): Name of the target instance scoping this request, or '-' if the request should span over @@ -27280,6 +30870,8 @@ class ListReferrersInstancesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27291,16 +30883,22 @@ class ListReferrersInstancesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): The name of the zone for this request. """ @@ -27341,6 +30939,8 @@ class ListRegionAutoscalersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27348,6 +30948,8 @@ class ListRegionAutoscalersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27359,10 +30961,14 @@ class ListRegionAutoscalersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
region (str): @@ -27371,6 +30977,8 @@ class ListRegionAutoscalersRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27408,6 +31016,8 @@ class ListRegionBackendServicesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27415,6 +31025,8 @@ class ListRegionBackendServicesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27426,10 +31038,14 @@ class ListRegionBackendServicesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -27438,6 +31054,8 @@ class ListRegionBackendServicesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27475,6 +31093,8 @@ class ListRegionCommitmentsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27482,6 +31102,8 @@ class ListRegionCommitmentsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27493,10 +31115,14 @@ class ListRegionCommitmentsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
region (str): @@ -27505,6 +31131,8 @@ class ListRegionCommitmentsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27542,6 +31170,8 @@ class ListRegionDiskTypesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27549,6 +31179,8 @@ class ListRegionDiskTypesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27560,10 +31192,14 @@ class ListRegionDiskTypesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -27572,6 +31208,8 @@ class ListRegionDiskTypesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27609,6 +31247,8 @@ class ListRegionDisksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27616,6 +31256,8 @@ class ListRegionDisksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27627,10 +31269,14 @@ class ListRegionDisksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
region (str): @@ -27639,6 +31285,8 @@ class ListRegionDisksRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27676,6 +31324,8 @@ class ListRegionHealthCheckServicesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27683,6 +31333,8 @@ class ListRegionHealthCheckServicesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27694,10 +31346,14 @@ class ListRegionHealthCheckServicesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -27706,6 +31362,8 @@ class ListRegionHealthCheckServicesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27743,6 +31401,8 @@ class ListRegionHealthChecksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27750,6 +31410,8 @@ class ListRegionHealthChecksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27761,10 +31423,14 @@ class ListRegionHealthChecksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
region (str): @@ -27773,6 +31439,8 @@ class ListRegionHealthChecksRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27810,6 +31478,8 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27817,6 +31487,8 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27828,10 +31500,14 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -27840,6 +31516,8 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27877,6 +31555,8 @@ class ListRegionInstanceGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27884,6 +31564,8 @@ class ListRegionInstanceGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27895,10 +31577,14 @@ class ListRegionInstanceGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. 
project (str): Project ID for this request. region (str): @@ -27907,6 +31593,8 @@ class ListRegionInstanceGroupsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -27944,6 +31632,8 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -27951,6 +31641,8 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -27962,10 +31654,14 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -27976,6 +31672,8 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28013,6 +31711,8 @@ class ListRegionNotificationEndpointsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28020,6 +31720,8 @@ class ListRegionNotificationEndpointsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28031,10 +31733,14 @@ class ListRegionNotificationEndpointsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. 
+ + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -28043,6 +31749,8 @@ class ListRegionNotificationEndpointsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28080,6 +31788,8 @@ class ListRegionOperationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28087,6 +31797,8 @@ class ListRegionOperationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28098,10 +31810,14 @@ class ListRegionOperationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -28110,6 +31826,8 @@ class ListRegionOperationsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28147,6 +31865,8 @@ class ListRegionSslCertificatesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28154,6 +31874,8 @@ class ListRegionSslCertificatesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28165,10 +31887,14 @@ class ListRegionSslCertificatesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. 
+ + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -28177,6 +31903,8 @@ class ListRegionSslCertificatesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28214,6 +31942,8 @@ class ListRegionTargetHttpProxiesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28221,6 +31951,8 @@ class ListRegionTargetHttpProxiesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28232,10 +31964,14 @@ class ListRegionTargetHttpProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -28244,6 +31980,8 @@ class ListRegionTargetHttpProxiesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28281,6 +32019,8 @@ class ListRegionTargetHttpsProxiesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28288,6 +32028,8 @@ class ListRegionTargetHttpsProxiesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28299,10 +32041,14 @@ class ListRegionTargetHttpsProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. 
Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -28311,6 +32057,8 @@ class ListRegionTargetHttpsProxiesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28348,6 +32096,8 @@ class ListRegionUrlMapsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28355,6 +32105,8 @@ class ListRegionUrlMapsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28366,10 +32118,14 @@ class ListRegionUrlMapsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -28378,6 +32134,8 @@ class ListRegionUrlMapsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28415,6 +32173,8 @@ class ListRegionsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28422,6 +32182,8 @@ class ListRegionsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28433,16 +32195,22 @@ class ListRegionsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. 
Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28479,6 +32247,8 @@ class ListReservationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28486,6 +32256,8 @@ class ListReservationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28497,16 +32269,22 @@ class ListReservationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): Name of the zone for this request. """ @@ -28546,6 +32324,8 @@ class ListResourcePoliciesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28553,6 +32333,8 @@ class ListResourcePoliciesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28564,10 +32346,14 @@ class ListResourcePoliciesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. 
project (str): Project ID for this request. region (str): @@ -28576,6 +32362,8 @@ class ListResourcePoliciesRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28613,6 +32401,8 @@ class ListRoutersRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28620,6 +32410,8 @@ class ListRoutersRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28631,10 +32423,14 @@ class ListRoutersRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -28643,6 +32439,8 @@ class ListRoutersRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28680,6 +32478,8 @@ class ListRoutesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28687,6 +32487,8 @@ class ListRoutesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28698,16 +32500,22 @@ class ListRoutesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28744,6 +32552,8 @@ class ListSecurityPoliciesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28751,6 +32561,8 @@ class ListSecurityPoliciesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28762,16 +32574,22 @@ class ListSecurityPoliciesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28808,6 +32626,8 @@ class ListServiceAttachmentsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28815,6 +32635,8 @@ class ListServiceAttachmentsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28826,10 +32648,14 @@ class ListServiceAttachmentsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. 
region (str): @@ -28838,6 +32664,8 @@ class ListServiceAttachmentsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28875,6 +32703,8 @@ class ListSnapshotsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28882,6 +32712,8 @@ class ListSnapshotsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28893,16 +32725,22 @@ class ListSnapshotsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -28939,6 +32777,8 @@ class ListSslCertificatesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -28946,6 +32786,8 @@ class ListSslCertificatesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -28957,16 +32799,22 @@ class ListSslCertificatesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29003,6 +32851,8 @@ class ListSslPoliciesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29010,6 +32860,8 @@ class ListSslPoliciesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29021,16 +32873,22 @@ class ListSslPoliciesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29067,6 +32925,8 @@ class ListSubnetworksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29074,6 +32934,8 @@ class ListSubnetworksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29085,10 +32947,14 @@ class ListSubnetworksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -29097,6 +32963,8 @@ class ListSubnetworksRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29134,6 +33002,8 @@ class ListTargetGrpcProxiesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29141,6 +33011,8 @@ class ListTargetGrpcProxiesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29152,16 +33024,22 @@ class ListTargetGrpcProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29198,6 +33076,8 @@ class ListTargetHttpProxiesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29205,6 +33085,8 @@ class ListTargetHttpProxiesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29216,16 +33098,22 @@ class ListTargetHttpProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
""" filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29262,6 +33150,8 @@ class ListTargetHttpsProxiesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29269,6 +33159,8 @@ class ListTargetHttpsProxiesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29280,16 +33172,22 @@ class ListTargetHttpsProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29326,73 +33224,236 @@ class ListTargetInstancesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - zone (str): - Name of the zone scoping this request. 
- """ - filter = proto.Field(proto.STRING, number=336120696, optional=True,) - max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) - order_by = proto.Field(proto.STRING, number=160562920, optional=True,) - page_token = proto.Field(proto.STRING, number=19994697, optional=True,) - project = proto.Field(proto.STRING, number=227560217,) - return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) - zone = proto.Field(proto.STRING, number=3744684,) - - -class ListTargetPoolsRequest(proto.Message): - r"""A request message for TargetPools.List. See the method - description for details. - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. The expression must specify the field name, a - comparison operator, and the value that you want to use for - filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either ``=``, - ``!=``, ``>``, or ``<``. For example, if you are filtering - Compute Engine instances, you can exclude instances named - ``example-instance`` by specifying - ``name != example-instance``. You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone scoping this request. 
+ """ + + filter = proto.Field(proto.STRING, number=336120696, optional=True,) + max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) + order_by = proto.Field(proto.STRING, number=160562920, optional=True,) + page_token = proto.Field(proto.STRING, number=19994697, optional=True,) + project = proto.Field(proto.STRING, number=227560217,) + return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) + zone = proto.Field(proto.STRING, number=3744684,) + + +class ListTargetPoolsRequest(proto.Message): + r"""A request message for TargetPools.List. See the method + description for details. + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either ``=``, + ``!=``, ``>``, or ``<``. For example, if you are filtering + Compute Engine instances, you can exclude instances named + ``example-instance`` by specifying + ``name != example-instance``. You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter = proto.Field(proto.STRING, number=336120696, optional=True,) + max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) + order_by = proto.Field(proto.STRING, number=160562920, optional=True,) + page_token = proto.Field(proto.STRING, number=19994697, optional=True,) + project = proto.Field(proto.STRING, number=227560217,) + region = proto.Field(proto.STRING, number=138946292,) + return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) + + +class ListTargetSslProxiesRequest(proto.Message): + r"""A request message for TargetSslProxies.List. See the method + description for details. + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either ``=``, + ``!=``, ``>``, or ``<``. For example, if you are filtering + Compute Engine instances, you can exclude instances named + ``example-instance`` by specifying + ``name != example-instance``. You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. You can also sort results in descending order based on + the creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter = proto.Field(proto.STRING, number=336120696, optional=True,) + max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) + order_by = proto.Field(proto.STRING, number=160562920, optional=True,) + page_token = proto.Field(proto.STRING, number=19994697, optional=True,) + project = proto.Field(proto.STRING, number=227560217,) + return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) + + +class ListTargetTcpProxiesRequest(proto.Message): + r"""A request message for TargetTcpProxies.List. See the method + description for details. + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. The expression must specify the field name, a + comparison operator, and the value that you want to use for + filtering. The value must be a string, a number, or a + boolean. The comparison operator must be either ``=``, + ``!=``, ``>``, or ``<``. For example, if you are filtering + Compute Engine instances, you can exclude instances named + ``example-instance`` by specifying + ``name != example-instance``. You can also filter nested + fields. For example, you could specify + ``scheduling.automaticRestart = false`` to include instances + only if they are not scheduled for automatic restarts. You + can use filtering on nested fields to filter based on + resource labels. To filter on multiple expressions, provide + each separate expression within parentheses. For example: + ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29400,73 +33461,8 @@ class ListTargetPoolsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - project (str): - Project ID for this request. - region (str): - Name of the region scoping this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. 
- """ - - filter = proto.Field(proto.STRING, number=336120696, optional=True,) - max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) - order_by = proto.Field(proto.STRING, number=160562920, optional=True,) - page_token = proto.Field(proto.STRING, number=19994697, optional=True,) - project = proto.Field(proto.STRING, number=227560217,) - region = proto.Field(proto.STRING, number=138946292,) - return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) - - -class ListTargetSslProxiesRequest(proto.Message): - r"""A request message for TargetSslProxies.List. See the method - description for details. - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. The expression must specify the field name, a - comparison operator, and the value that you want to use for - filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either ``=``, - ``!=``, ``>``, or ``<``. For example, if you are filtering - Compute Engine instances, you can exclude instances named - ``example-instance`` by specifying - ``name != example-instance``. You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29478,80 +33474,22 @@ class ListTargetSslProxiesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - """ - filter = proto.Field(proto.STRING, number=336120696, optional=True,) - max_results = proto.Field(proto.UINT32, number=54715419, optional=True,) - order_by = proto.Field(proto.STRING, number=160562920, optional=True,) - page_token = proto.Field(proto.STRING, number=19994697, optional=True,) - project = proto.Field(proto.STRING, number=227560217,) - return_partial_success = proto.Field(proto.BOOL, number=517198390, optional=True,) - - -class ListTargetTcpProxiesRequest(proto.Message): - r"""A request message for TargetTcpProxies.List. 
See the method - description for details. - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. The expression must specify the field name, a - comparison operator, and the value that you want to use for - filtering. The value must be a string, a number, or a - boolean. The comparison operator must be either ``=``, - ``!=``, ``>``, or ``<``. For example, if you are filtering - Compute Engine instances, you can exclude instances named - ``example-instance`` by specifying - ``name != example-instance``. You can also filter nested - fields. For example, you could specify - ``scheduling.automaticRestart = false`` to include instances - only if they are not scheduled for automatic restarts. You - can use filtering on nested fields to filter based on - resource labels. To filter on multiple expressions, provide - each separate expression within parentheses. For example: - ``(scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake")`` - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. You can also sort results in descending order based on - the creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29588,6 +33526,8 @@ class ListTargetVpnGatewaysRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29595,6 +33535,8 @@ class ListTargetVpnGatewaysRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. 
order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29606,10 +33548,14 @@ class ListTargetVpnGatewaysRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -29618,6 +33564,8 @@ class ListTargetVpnGatewaysRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29655,6 +33603,8 @@ class ListUrlMapsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29662,6 +33612,8 @@ class ListUrlMapsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29673,16 +33625,22 @@ class ListUrlMapsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29719,6 +33677,8 @@ class ListUsableSubnetworksRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29726,6 +33686,8 @@ class ListUsableSubnetworksRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. 
By default, results are returned in alphanumerical order based on the resource @@ -29737,16 +33699,22 @@ class ListUsableSubnetworksRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29783,6 +33751,8 @@ class ListVpnGatewaysRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29790,6 +33760,8 @@ class ListVpnGatewaysRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29801,10 +33773,14 @@ class ListVpnGatewaysRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -29813,6 +33789,8 @@ class ListVpnGatewaysRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29850,6 +33828,8 @@ class ListVpnTunnelsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29857,6 +33837,8 @@ class ListVpnTunnelsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. 
By default, results are returned in alphanumerical order based on the resource @@ -29868,10 +33850,14 @@ class ListVpnTunnelsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. region (str): @@ -29880,6 +33866,8 @@ class ListVpnTunnelsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29917,6 +33905,8 @@ class ListXpnHostsProjectsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29924,6 +33914,8 @@ class ListXpnHostsProjectsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -29935,10 +33927,14 @@ class ListXpnHostsProjectsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. projects_list_xpn_hosts_request_resource (google.cloud.compute_v1.types.ProjectsListXpnHostsRequest): @@ -29947,6 +33943,8 @@ class ListXpnHostsProjectsRequest(proto.Message): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -29986,6 +33984,8 @@ class ListZoneOperationsRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -29993,6 +33993,8 @@ class ListZoneOperationsRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. 
order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -30004,16 +34006,22 @@ class ListZoneOperationsRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. zone (str): Name of the zone for request. """ @@ -30053,6 +34061,8 @@ class ListZonesRequest(proto.Message): However, you can include ``AND`` and ``OR`` expressions explicitly. For example: ``(cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true)`` + + This field is a member of `oneof`_ ``_filter``. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -30060,6 +34070,8 @@ class ListZonesRequest(proto.Message): that can be used to get the next page of results in subsequent list requests. Acceptable values are ``0`` to ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -30071,16 +34083,22 @@ class ListZonesRequest(proto.Message): resources like operations so that the newest operation is returned first. Currently, only sorting by ``name`` or ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. page_token (str): Specifies a page token to use. Set ``pageToken`` to the ``nextPageToken`` returned by a previous list request to get the next page of results. + + This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The default value is false. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter = proto.Field(proto.STRING, number=336120696, optional=True,) @@ -30093,17 +34111,24 @@ class ListZonesRequest(proto.Message): class LocalDisk(proto.Message): r""" + Attributes: disk_count (int): Specifies the number of such disks. + + This field is a member of `oneof`_ ``_disk_count``. disk_size_gb (int): Specifies the size of the disk in base-2 GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. disk_type (str): Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL. + + This field is a member of `oneof`_ ``_disk_type``. """ disk_count = proto.Field(proto.INT32, number=182933485, optional=True,) @@ -30131,10 +34156,13 @@ class LocationPolicy(proto.Message): class LocationPolicyLocation(proto.Message): r""" + Attributes: preference (google.cloud.compute_v1.types.LocationPolicyLocation.Preference): Preference for a given location: ALLOW or DENY. 
+ + This field is a member of `oneof`_ ``_preference``. """ class Preference(proto.Enum): @@ -30151,16 +34179,23 @@ class Preference(proto.Enum): class LogConfig(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: cloud_audit (google.cloud.compute_v1.types.LogConfigCloudAuditOptions): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_cloud_audit``. counter (google.cloud.compute_v1.types.LogConfigCounterOptions): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_counter``. data_access (google.cloud.compute_v1.types.LogConfigDataAccessOptions): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_data_access``. """ cloud_audit = proto.Field( @@ -30185,13 +34220,18 @@ class LogConfig(proto.Message): class LogConfigCloudAuditOptions(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: authorization_logging_options (google.cloud.compute_v1.types.AuthorizationLoggingOptions): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_authorization_logging_options``. log_name (google.cloud.compute_v1.types.LogConfigCloudAuditOptions.LogName): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_log_name``. """ class LogName(proto.Enum): @@ -30212,6 +34252,7 @@ class LogName(proto.Enum): class LogConfigCounterOptions(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: custom_fields (Sequence[google.cloud.compute_v1.types.LogConfigCounterOptionsCustomField]): This is deprecated and has no effect. Do not @@ -30219,9 +34260,13 @@ class LogConfigCounterOptions(proto.Message): field (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_field``. metric (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_metric``. """ custom_fields = proto.RepeatedField( @@ -30233,13 +34278,18 @@ class LogConfigCounterOptions(proto.Message): class LogConfigCounterOptionsCustomField(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: name (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_name``. value (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_value``. """ name = proto.Field(proto.STRING, number=3373707, optional=True,) @@ -30248,10 +34298,13 @@ class LogConfigCounterOptionsCustomField(proto.Message): class LogConfigDataAccessOptions(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: log_mode (google.cloud.compute_v1.types.LogConfigDataAccessOptions.LogMode): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_log_mode``. """ class LogMode(proto.Enum): @@ -30274,46 +34327,74 @@ class MachineType(proto.Message): to this machine type. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. deprecated (google.cloud.compute_v1.types.DeprecationStatus): [Output Only] The deprecation status associated with this machine type. Only applicable if the machine type is unavailable. + + This field is a member of `oneof`_ ``_deprecated``. description (str): [Output Only] An optional textual description of the resource. 
+ + This field is a member of `oneof`_ ``_description``. guest_cpus (int): [Output Only] The number of virtual CPUs that are available to the instance. + + This field is a member of `oneof`_ ``_guest_cpus``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. image_space_gb (int): [Deprecated] This property is deprecated and will never be populated with any relevant values. + + This field is a member of `oneof`_ ``_image_space_gb``. is_shared_cpu (bool): [Output Only] Whether this machine type has a shared CPU. See Shared-core machine types for more information. + + This field is a member of `oneof`_ ``_is_shared_cpu``. kind (str): [Output Only] The type of the resource. Always compute#machineType for machine types. + + This field is a member of `oneof`_ ``_kind``. maximum_persistent_disks (int): [Output Only] Maximum persistent disks allowed. + + This field is a member of `oneof`_ ``_maximum_persistent_disks``. maximum_persistent_disks_size_gb (int): [Output Only] Maximum total persistent disks size (GB) allowed. + + This field is a member of `oneof`_ ``_maximum_persistent_disks_size_gb``. memory_mb (int): [Output Only] The amount of physical memory available to the instance, defined in MB. + + This field is a member of `oneof`_ ``_memory_mb``. name (str): [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. scratch_disks (Sequence[google.cloud.compute_v1.types.ScratchDisks]): [Output Only] A list of extended scratch disks assigned to the instance. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. zone (str): [Output Only] The name of the zone where the machine type resides, such as us-central1-a. + + This field is a member of `oneof`_ ``_zone``. """ accelerators = proto.RepeatedField( @@ -30346,16 +34427,21 @@ class MachineType(proto.Message): class MachineTypeAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.MachineTypeAggregatedList.ItemsEntry]): A list of MachineTypesScopedList resources. kind (str): [Output Only] Type of resource. Always compute#machineTypeAggregatedList for aggregated lists of machine types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -30363,12 +34449,18 @@ class MachineTypeAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -30390,15 +34482,20 @@ def raw_page(self): class MachineTypeList(proto.Message): r"""Contains a list of machine types. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. 
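MachineTypeAggregatedList above groups results per scope, and the generated ``aggregated_list`` pager yields ``(zone, MachineTypesScopedList)`` pairs. A sketch with a placeholder project:

from google.cloud import compute_v1

client = compute_v1.MachineTypesClient()
request = compute_v1.AggregatedListMachineTypesRequest(project="my-project")  # placeholder
for zone, scoped_list in client.aggregated_list(request=request):
    # Scopes without machine types carry only the informational warning.
    for machine_type in scoped_list.machine_types:
        print(zone, machine_type.name, machine_type.guest_cpus, machine_type.memory_mb)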
items (Sequence[google.cloud.compute_v1.types.MachineType]): A list of MachineType resources. kind (str): [Output Only] Type of resource. Always compute#machineTypeList for lists of machine types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -30406,10 +34503,16 @@ class MachineTypeList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -30428,6 +34531,7 @@ def raw_page(self): class MachineTypesScopedList(proto.Message): r""" + Attributes: machine_types (Sequence[google.cloud.compute_v1.types.MachineType]): [Output Only] A list of machine types contained in this @@ -30435,6 +34539,8 @@ class MachineTypesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] An informational warning that appears when the machine types list is empty. + + This field is a member of `oneof`_ ``_warning``. """ machine_types = proto.RepeatedField( @@ -30447,6 +34553,7 @@ class MachineTypesScopedList(proto.Message): class ManagedInstance(proto.Message): r"""A Managed Instance resource. + Attributes: current_action (google.cloud.compute_v1.types.ManagedInstance.CurrentAction): [Output Only] The current action that the managed instance @@ -30473,28 +34580,44 @@ class ManagedInstance(proto.Message): that instance. - VERIFYING The managed instance group has created the instance and it is in the process of being verified. + + This field is a member of `oneof`_ ``_current_action``. id (int): [Output only] The unique identifier for this resource. This field is empty when instance does not exist. + + This field is a member of `oneof`_ ``_id``. instance (str): [Output Only] The URL of the instance. The URL can exist even if the instance has not yet been created. + + This field is a member of `oneof`_ ``_instance``. instance_health (Sequence[google.cloud.compute_v1.types.ManagedInstanceInstanceHealth]): [Output Only] Health state of the instance per health-check. instance_status (google.cloud.compute_v1.types.ManagedInstance.InstanceStatus): [Output Only] The status of the instance. This field is empty when the instance does not exist. + + This field is a member of `oneof`_ ``_instance_status``. last_attempt (google.cloud.compute_v1.types.ManagedInstanceLastAttempt): [Output Only] Information about the last attempt to create or delete the instance. + + This field is a member of `oneof`_ ``_last_attempt``. preserved_state_from_config (google.cloud.compute_v1.types.PreservedState): [Output Only] Preserved state applied from per-instance config for this instance. + + This field is a member of `oneof`_ ``_preserved_state_from_config``. preserved_state_from_policy (google.cloud.compute_v1.types.PreservedState): [Output Only] Preserved state generated based on stateful policy for this instance. + + This field is a member of `oneof`_ ``_preserved_state_from_policy``. version (google.cloud.compute_v1.types.ManagedInstanceVersion): [Output Only] Intended version of this instance. 
+ + This field is a member of `oneof`_ ``_version``. """ class CurrentAction(proto.Enum): @@ -30579,12 +34702,17 @@ class InstanceStatus(proto.Enum): class ManagedInstanceInstanceHealth(proto.Message): r""" + Attributes: detailed_health_state (google.cloud.compute_v1.types.ManagedInstanceInstanceHealth.DetailedHealthState): [Output Only] The current detailed instance health state. + + This field is a member of `oneof`_ ``_detailed_health_state``. health_check (str): [Output Only] The URL for the health check that verifies whether the instance is healthy. + + This field is a member of `oneof`_ ``_health_check``. """ class DetailedHealthState(proto.Enum): @@ -30604,10 +34732,13 @@ class DetailedHealthState(proto.Enum): class ManagedInstanceLastAttempt(proto.Message): r""" + Attributes: errors (google.cloud.compute_v1.types.Errors): [Output Only] Encountered errors during the last attempt to create or delete the instance. + + This field is a member of `oneof`_ ``_errors``. """ errors = proto.Field( @@ -30617,13 +34748,18 @@ class ManagedInstanceLastAttempt(proto.Message): class ManagedInstanceVersion(proto.Message): r""" + Attributes: instance_template (str): [Output Only] The intended template of the instance. This field is empty when current_action is one of { DELETING, ABANDONING }. + + This field is a member of `oneof`_ ``_instance_template``. name (str): [Output Only] Name of the version. + + This field is a member of `oneof`_ ``_name``. """ instance_template = proto.Field(proto.STRING, number=309248228, optional=True,) @@ -30632,6 +34768,7 @@ class ManagedInstanceVersion(proto.Message): class Metadata(proto.Message): r"""A metadata key/value entry. + Attributes: fingerprint (str): Specifies a fingerprint for this request, @@ -30645,12 +34782,16 @@ class Metadata(proto.Message): fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the resource. + + This field is a member of `oneof`_ ``_fingerprint``. items (Sequence[google.cloud.compute_v1.types.Items]): Array of key/value pairs. The total size of all keys and values must be less than 512 KB. kind (str): [Output Only] Type of the resource. Always compute#metadata for metadata. + + This field is a member of `oneof`_ ``_kind``. """ fingerprint = proto.Field(proto.STRING, number=234678500, optional=True,) @@ -30687,6 +34828,8 @@ class MetadataFilter(proto.Message): least one of the filterLabels must have a matching label in the provided metadata. - MATCH_ALL: All filterLabels must have matching labels in the provided metadata. + + This field is a member of `oneof`_ ``_filter_match_criteria``. """ class FilterMatchCriteria(proto.Enum): @@ -30719,10 +34862,14 @@ class MetadataFilterLabelMatch(proto.Message): Name of metadata label. The name can have a maximum length of 1024 characters and must be at least 1 character long. + + This field is a member of `oneof`_ ``_name``. value (str): The value of the label must match the specified value. value can have a maximum length of 1024 characters. + + This field is a member of `oneof`_ ``_value``. """ name = proto.Field(proto.STRING, number=3373707, optional=True,) @@ -30754,6 +34901,8 @@ class MoveDiskProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ disk_move_request_resource = proto.Field( @@ -30772,6 +34921,8 @@ class MoveFirewallPolicyRequest(proto.Message): Name of the firewall policy to update. 
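The ``Metadata.fingerprint`` description above spells out an optimistic-locking cycle: fetch the current metadata, send the same fingerprint back with the update, and retry after a 412 ``conditionNotMet`` error. A rough sketch with placeholder resource names, assuming the usual flattened ``set_metadata`` parameters:

from google.cloud import compute_v1

client = compute_v1.InstancesClient()
instance = client.get(project="my-project", zone="us-central1-a", instance="my-vm")  # placeholders
metadata = instance.metadata
metadata.items.append(compute_v1.Items(key="env", value="prod"))
# The fetched fingerprint is sent back unchanged; a concurrent edit
# invalidates it and the request fails with 412 conditionNotMet.
client.set_metadata(
    project="my-project",
    zone="us-central1-a",
    instance="my-vm",
    metadata_resource=metadata,
)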
parent_id (str): The new parent of the firewall policy. + + This field is a member of `oneof`_ ``_parent_id``. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30788,6 +34939,8 @@ class MoveFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -30820,6 +34973,8 @@ class MoveInstanceProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_move_request_resource = proto.Field( @@ -30831,14 +34986,19 @@ class MoveInstanceProjectRequest(proto.Message): class NamedPort(proto.Message): r"""The named port. For example: <"http", 80>. + Attributes: name (str): The name for this named port. The name must be 1-63 characters long, and comply with RFC1035. + + This field is a member of `oneof`_ ``_name``. port (int): The port number, which can be a value between 1 and 65535. + + This field is a member of `oneof`_ ``_port``. """ name = proto.Field(proto.STRING, number=3373707, optional=True,) @@ -30858,6 +35018,8 @@ class Network(proto.Message): specification, for example: 192.168.0.0/16. Provided by the client when the network is created. + + This field is a member of `oneof`_ ``_I_pv4_range``. auto_create_subnetworks (bool): Must be set to create a VPC network. If not set, a legacy network is created. When set to @@ -30869,25 +35031,39 @@ class Network(proto.Message): VPC network IP ranges. For custom mode VPC networks, you can add subnets using the subnetworks insert method. + + This field is a member of `oneof`_ ``_auto_create_subnetworks``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. gateway_i_pv4 (str): [Output Only] The gateway address for default routing out of the network, selected by GCP. + + This field is a member of `oneof`_ ``_gateway_i_pv4``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#network for networks. + + This field is a member of `oneof`_ ``_kind``. mtu (int): Maximum Transmission Unit in bytes. The minimum value for this field is 1460 and the maximum value is 1500 bytes. If unspecified, defaults to 1460. + + This field is a member of `oneof`_ ``_mtu``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -30898,6 +35074,8 @@ class Network(proto.Message): the last character) must be a dash, lowercase letter, or digit. The last character must be a lowercase letter or digit. + + This field is a member of `oneof`_ ``_name``. peerings (Sequence[google.cloud.compute_v1.types.NetworkPeering]): [Output Only] A list of network peerings for the resource. routing_config (google.cloud.compute_v1.types.NetworkRoutingConfig): @@ -30905,8 +35083,12 @@ class Network(proto.Message): this network. 
Used by Cloud Router to determine what type of network-wide routing behavior to enforce. + + This field is a member of `oneof`_ ``_routing_config``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. subnetworks (Sequence[str]): [Output Only] Server-defined fully-qualified URLs for all subnetworks in this VPC network. @@ -30933,6 +35115,7 @@ class Network(proto.Message): class NetworkEndpoint(proto.Message): r"""The network endpoint. + Attributes: annotations (Sequence[google.cloud.compute_v1.types.NetworkEndpoint.AnnotationsEntry]): Metadata defined as annotations on the @@ -30942,12 +35125,16 @@ class NetworkEndpoint(proto.Message): This can only be specified when NetworkEndpointGroup.network_endpoint_type is NON_GCP_FQDN_PORT. + + This field is a member of `oneof`_ ``_fqdn``. instance (str): The name for a specific VM instance that the IP address belongs to. This is required for network endpoints of type GCE_VM_IP_PORT. The instance must be in the same zone of network endpoint group. The name must be 1-63 characters long, and comply with RFC1035. + + This field is a member of `oneof`_ ``_instance``. ip_address (str): Optional IPv4 address of network endpoint. The IP address must belong to a VM in Compute @@ -30956,10 +35143,14 @@ class NetworkEndpoint(proto.Message): specified, then the primary IP address for the VM instance in the network that the network endpoint group belongs to will be used. + + This field is a member of `oneof`_ ``_ip_address``. port (int): Optional port number of network endpoint. If not specified, the defaultPort for the network endpoint group will be used. + + This field is a member of `oneof`_ ``_port``. """ annotations = proto.MapField(proto.STRING, proto.STRING, number=112032548,) @@ -30986,29 +35177,45 @@ class NetworkEndpointGroup(proto.Message): Only valid when networkEndpointType is "SERVERLESS". Only one of cloudRun, appEngine or cloudFunction may be set. + + This field is a member of `oneof`_ ``_app_engine``. cloud_function (google.cloud.compute_v1.types.NetworkEndpointGroupCloudFunction): Only valid when networkEndpointType is "SERVERLESS". Only one of cloudRun, appEngine or cloudFunction may be set. + + This field is a member of `oneof`_ ``_cloud_function``. cloud_run (google.cloud.compute_v1.types.NetworkEndpointGroupCloudRun): Only valid when networkEndpointType is "SERVERLESS". Only one of cloudRun, appEngine or cloudFunction may be set. + + This field is a member of `oneof`_ ``_cloud_run``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. default_port (int): The default port used if the port number is not specified in the network endpoint. + + This field is a member of `oneof`_ ``_default_port``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#networkEndpointGroup for network endpoint group. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource; provided by the client when the resource is created. 
The name must be 1-63 characters long, @@ -31018,29 +35225,45 @@ class NetworkEndpointGroup(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network (str): The URL of the network to which all network endpoints in the NEG belong. Uses "default" project network if unspecified. + + This field is a member of `oneof`_ ``_network``. network_endpoint_type (google.cloud.compute_v1.types.NetworkEndpointGroup.NetworkEndpointType): Type of network endpoints in this network endpoint group. Can be one of GCE_VM_IP_PORT, NON_GCP_PRIVATE_IP_PORT, INTERNET_FQDN_PORT, INTERNET_IP_PORT, SERVERLESS, PRIVATE_SERVICE_CONNECT. + + This field is a member of `oneof`_ ``_network_endpoint_type``. region (str): [Output Only] The URL of the region where the network endpoint group is located. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. size (int): [Output only] Number of network endpoints in the network endpoint group. + + This field is a member of `oneof`_ ``_size``. subnetwork (str): Optional URL of the subnetwork to which all network endpoints in the NEG belong. + + This field is a member of `oneof`_ ``_subnetwork``. zone (str): [Output Only] The URL of the zone where the network endpoint group is located. + + This field is a member of `oneof`_ ``_zone``. """ class NetworkEndpointType(proto.Enum): @@ -31094,10 +35317,13 @@ class NetworkEndpointType(proto.Enum): class NetworkEndpointGroupAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NetworkEndpointGroupAggregatedList.ItemsEntry]): A list of NetworkEndpointGroupsScopedList resources. @@ -31105,6 +35331,8 @@ class NetworkEndpointGroupAggregatedList(proto.Message): [Output Only] The resource type, which is always compute#networkEndpointGroupAggregatedList for aggregated lists of network endpoint groups. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -31112,12 +35340,18 @@ class NetworkEndpointGroupAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -31153,6 +35387,8 @@ class NetworkEndpointGroupAppEngine(proto.Message): Optional serving service. The service name is case-sensitive and must be 1-63 characters long. Example value: "default", "my-service". + + This field is a member of `oneof`_ ``_service``. url_mask (str): A template to parse service and version fields from a request URL. 
URL mask allows for @@ -31166,10 +35402,14 @@ class NetworkEndpointGroupAppEngine(proto.Message): parse them to { service = "foo1", version = "v1" } and { service = "foo1", version = "v2" } respectively. + + This field is a member of `oneof`_ ``_url_mask``. version (str): Optional serving version. The version name is case-sensitive and must be 1-100 characters long. Example value: "v1", "v2". + + This field is a member of `oneof`_ ``_version``. """ service = proto.Field(proto.STRING, number=373540533, optional=True,) @@ -31188,6 +35428,8 @@ class NetworkEndpointGroupCloudFunction(proto.Message): A user-defined name of the Cloud Function. The function name is case-sensitive and must be 1-63 characters long. Example value: "func1". + + This field is a member of `oneof`_ ``_function``. url_mask (str): A template to parse function field from a request URL. URL mask allows for routing to @@ -31199,6 +35441,8 @@ class NetworkEndpointGroupCloudFunction(proto.Message): same Serverless NEG with URL mask "/". The URL mask will parse them to { function = "function1" } and { function = "function2" } respectively. + + This field is a member of `oneof`_ ``_url_mask``. """ function = proto.Field(proto.STRING, number=307196888, optional=True,) @@ -31218,12 +35462,16 @@ class NetworkEndpointGroupCloudRun(proto.Message): Cloud Run. The service must be 1-63 characters long, and comply with RFC1035. Example value: "run-service". + + This field is a member of `oneof`_ ``_service``. tag (str): Optional Cloud Run tag represents the "named- evision" to provide additional fine-grained traffic routing information. The tag must be 1-63 characters long, and comply with RFC1035. Example value: "revision-0010". + + This field is a member of `oneof`_ ``_tag``. url_mask (str): A template to parse service and tag fields from a request URL. URL mask allows for routing @@ -31236,6 +35484,8 @@ class NetworkEndpointGroupCloudRun(proto.Message): mask ".domain.com/". The URL mask will parse them to { service="bar1", tag="foo1" } and { service="bar2", tag="foo2" } respectively. + + This field is a member of `oneof`_ ``_url_mask``. """ service = proto.Field(proto.STRING, number=373540533, optional=True,) @@ -31245,16 +35495,21 @@ class NetworkEndpointGroupCloudRun(proto.Message): class NetworkEndpointGroupList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NetworkEndpointGroup]): A list of NetworkEndpointGroup resources. kind (str): [Output Only] The resource type, which is always compute#networkEndpointGroupList for network endpoint group lists. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -31262,10 +35517,16 @@ class NetworkEndpointGroupList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
""" @property @@ -31286,6 +35547,7 @@ def raw_page(self): class NetworkEndpointGroupsAttachEndpointsRequest(proto.Message): r""" + Attributes: network_endpoints (Sequence[google.cloud.compute_v1.types.NetworkEndpoint]): The list of network endpoints to be attached. @@ -31298,6 +35560,7 @@ class NetworkEndpointGroupsAttachEndpointsRequest(proto.Message): class NetworkEndpointGroupsDetachEndpointsRequest(proto.Message): r""" + Attributes: network_endpoints (Sequence[google.cloud.compute_v1.types.NetworkEndpoint]): The list of network endpoints to be detached. @@ -31310,6 +35573,7 @@ class NetworkEndpointGroupsDetachEndpointsRequest(proto.Message): class NetworkEndpointGroupsListEndpointsRequest(proto.Message): r""" + Attributes: health_status (google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest.HealthStatus): Optional query parameter for showing the @@ -31317,6 +35581,8 @@ class NetworkEndpointGroupsListEndpointsRequest(proto.Message): options are SKIP or SHOW. If you don't specify this parameter, the health status of network endpoints will not be provided. + + This field is a member of `oneof`_ ``_health_status``. """ class HealthStatus(proto.Enum): @@ -31336,10 +35602,13 @@ class HealthStatus(proto.Enum): class NetworkEndpointGroupsListNetworkEndpoints(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NetworkEndpointWithHealthStatus]): A list of NetworkEndpointWithHealthStatus resources. @@ -31348,6 +35617,8 @@ class NetworkEndpointGroupsListNetworkEndpoints(proto.Message): compute#networkEndpointGroupsListNetworkEndpoints for the list of network endpoints in the specified network endpoint group. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -31355,8 +35626,12 @@ class NetworkEndpointGroupsListNetworkEndpoints(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -31376,6 +35651,7 @@ def raw_page(self): class NetworkEndpointGroupsScopedList(proto.Message): r""" + Attributes: network_endpoint_groups (Sequence[google.cloud.compute_v1.types.NetworkEndpointGroup]): [Output Only] The list of network endpoint groups that are @@ -31383,6 +35659,8 @@ class NetworkEndpointGroupsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] An informational warning that replaces the list of network endpoint groups when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ network_endpoint_groups = proto.RepeatedField( @@ -31395,11 +35673,14 @@ class NetworkEndpointGroupsScopedList(proto.Message): class NetworkEndpointWithHealthStatus(proto.Message): r""" + Attributes: healths (Sequence[google.cloud.compute_v1.types.HealthStatusForNetworkEndpoint]): [Output only] The health status of network endpoint; network_endpoint (google.cloud.compute_v1.types.NetworkEndpoint): [Output only] The network endpoint; + + This field is a member of `oneof`_ ``_network_endpoint``. 
""" healths = proto.RepeatedField( @@ -31412,6 +35693,7 @@ class NetworkEndpointWithHealthStatus(proto.Message): class NetworkInterface(proto.Message): r"""A network interface resource attached to an instance. + Attributes: access_configs (Sequence[google.cloud.compute_v1.types.AccessConfig]): An array of configurations for this interface. Currently, @@ -31432,6 +35714,8 @@ class NetworkInterface(proto.Message): error 400 Bad Request if the fingerprint is not provided, or 412 Precondition Failed if the fingerprint is out of date. + + This field is a member of `oneof`_ ``_fingerprint``. ipv6_access_configs (Sequence[google.cloud.compute_v1.types.AccessConfig]): An array of IPv6 access configurations for this interface. Currently, only one IPv6 access config, DIRECT_IPV6, is @@ -31442,16 +35726,24 @@ class NetworkInterface(proto.Message): the IP can be accessed from the Internet. This field is always inherited from its subnetwork. Valid only if stackType is IPV4_IPV6. + + This field is a member of `oneof`_ ``_ipv6_access_type``. ipv6_address (str): [Output Only] An IPv6 internal network address for this network interface. + + This field is a member of `oneof`_ ``_ipv6_address``. kind (str): [Output Only] Type of the resource. Always compute#networkInterface for network interfaces. + + This field is a member of `oneof`_ ``_kind``. name (str): [Output Only] The name of the network interface, which is generated by the server. For network devices, these are eth0, eth1, etc. + + This field is a member of `oneof`_ ``_name``. network (str): URL of the network resource for this instance. When creating an instance, if neither @@ -31466,25 +35758,35 @@ class NetworkInterface(proto.Message): network - projects/project/global/networks/network - global/networks/default + + This field is a member of `oneof`_ ``_network``. network_i_p (str): An IPv4 internal IP address to assign to the instance for this network interface. If not specified by the user, an unused internal IP is assigned by the system. + + This field is a member of `oneof`_ ``_network_i_p``. nic_type (google.cloud.compute_v1.types.NetworkInterface.NicType): The type of vNIC to be used on this interface. This may be gVNIC or VirtioNet. + + This field is a member of `oneof`_ ``_nic_type``. queue_count (int): The networking queue count that's specified by users for the network interface. Both Rx and Tx queues will be set to this number. It'll be empty if not specified by the users. + + This field is a member of `oneof`_ ``_queue_count``. stack_type (google.cloud.compute_v1.types.NetworkInterface.StackType): The stack type for this network interface to identify whether the IPv6 feature is enabled or not. If not specified, IPV4_ONLY will be used. This field can be both set at instance creation and update network interface operations. + + This field is a member of `oneof`_ ``_stack_type``. subnetwork (str): The URL of the Subnetwork resource for this instance. If the network resource is in legacy @@ -31498,6 +35800,8 @@ class NetworkInterface(proto.Message): https://www.googleapis.com/compute/v1/projects/project/regions/region /subnetworks/subnetwork - regions/region/subnetworks/subnetwork + + This field is a member of `oneof`_ ``_subnetwork``. """ class Ipv6AccessType(proto.Enum): @@ -31557,15 +35861,20 @@ class StackType(proto.Enum): class NetworkList(proto.Message): r"""Contains a list of networks. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. 
items (Sequence[google.cloud.compute_v1.types.Network]): A list of Network resources. kind (str): [Output Only] Type of resource. Always compute#networkList for lists of networks. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -31573,10 +35882,16 @@ class NetworkList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -31608,6 +35923,8 @@ class NetworkPeering(proto.Message): be true since Google Compute Engine will automatically create and manage subnetwork routes between two networks when peering state is ACTIVE. + + This field is a member of `oneof`_ ``_auto_create_routes``. exchange_subnet_routes (bool): Indicates whether full mesh connectivity is created and managed automatically between peered @@ -31616,23 +35933,33 @@ class NetworkPeering(proto.Message): automatically create and manage subnetwork routes between two networks when peering state is ACTIVE. + + This field is a member of `oneof`_ ``_exchange_subnet_routes``. export_custom_routes (bool): Whether to export the custom routes to peer network. + + This field is a member of `oneof`_ ``_export_custom_routes``. export_subnet_routes_with_public_ip (bool): Whether subnet routes with public IP range are exported. The default value is true, all subnet routes are exported. IPv4 special-use ranges are always exported to peers and are not controlled by this field. + + This field is a member of `oneof`_ ``_export_subnet_routes_with_public_ip``. import_custom_routes (bool): Whether to import the custom routes from peer network. + + This field is a member of `oneof`_ ``_import_custom_routes``. import_subnet_routes_with_public_ip (bool): Whether subnet routes with public IP range are imported. The default value is false. IPv4 special-use ranges are always imported from peers and are not controlled by this field. + + This field is a member of `oneof`_ ``_import_subnet_routes_with_public_ip``. name (str): Name of this peering. Provided by the client when the peering is created. The name must comply with RFC1035. @@ -31641,6 +35968,8 @@ class NetworkPeering(proto.Message): first character must be a lowercase letter, and all the following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network (str): The URL of the peer network. It can be either full URL or partial URL. The peer network may @@ -31648,15 +35977,23 @@ class NetworkPeering(proto.Message): URL does not contain project, it is assumed that the peer network is in the same project as the current network. + + This field is a member of `oneof`_ ``_network``. peer_mtu (int): Maximum Transmission Unit in bytes. + + This field is a member of `oneof`_ ``_peer_mtu``. state (google.cloud.compute_v1.types.NetworkPeering.State): [Output Only] State for the peering, either ``ACTIVE`` or ``INACTIVE``. The peering is ``ACTIVE`` when there's a matching configuration in the peer network. 
+ + This field is a member of `oneof`_ ``_state``. state_details (str): [Output Only] Details about the current state of the peering. + + This field is a member of `oneof`_ ``_state_details``. """ class State(proto.Enum): @@ -31700,6 +36037,8 @@ class NetworkRoutingConfig(proto.Message): to GLOBAL, this network's Cloud Routers will advertise routes with all subnets of this network, across regions. + + This field is a member of `oneof`_ ``_routing_mode``. """ class RoutingMode(proto.Enum): @@ -31720,6 +36059,7 @@ class RoutingMode(proto.Enum): class NetworksAddPeeringRequest(proto.Message): r""" + Attributes: auto_create_routes (bool): This field will be deprecated soon. Use @@ -31729,9 +36069,13 @@ class NetworksAddPeeringRequest(proto.Message): should always be true since Google Compute Engine will automatically create and manage subnetwork routes between two networks when peering state is ACTIVE. + + This field is a member of `oneof`_ ``_auto_create_routes``. name (str): Name of the peering, which should conform to RFC1035. + + This field is a member of `oneof`_ ``_name``. network_peering (google.cloud.compute_v1.types.NetworkPeering): Network peering parameters. In order to specify route policies for peering using import and export custom routes, @@ -31739,6 +36083,8 @@ class NetworksAddPeeringRequest(proto.Message): network, exchange_subnet_routes) in the network_peering field. The corresponding fields in NetworksAddPeeringRequest will be deprecated soon. + + This field is a member of `oneof`_ ``_network_peering``. peer_network (str): URL of the peer network. It can be either full URL or partial URL. The peer network may @@ -31746,6 +36092,8 @@ class NetworksAddPeeringRequest(proto.Message): URL does not contain project, it is assumed that the peer network is in the same project as the current network. + + This field is a member of `oneof`_ ``_peer_network``. """ auto_create_routes = proto.Field(proto.BOOL, number=57454941, optional=True,) @@ -31758,6 +36106,7 @@ class NetworksAddPeeringRequest(proto.Message): class NetworksGetEffectiveFirewallsResponse(proto.Message): r""" + Attributes: firewall_policys (Sequence[google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy]): Effective firewalls from firewall policy. @@ -31777,18 +36126,27 @@ class NetworksGetEffectiveFirewallsResponse(proto.Message): class NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy(proto.Message): r""" + Attributes: display_name (str): [Output Only] Deprecated, please use short name instead. The display name of the firewall policy. + + This field is a member of `oneof`_ ``_display_name``. name (str): [Output Only] The name of the firewall policy. + + This field is a member of `oneof`_ ``_name``. rules (Sequence[google.cloud.compute_v1.types.FirewallPolicyRule]): The rules that apply to the network. short_name (str): [Output Only] The short name of the firewall policy. + + This field is a member of `oneof`_ ``_short_name``. type_ (google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy.Type): [Output Only] The type of the firewall policy. + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -31809,10 +36167,13 @@ class Type(proto.Enum): class NetworksRemovePeeringRequest(proto.Message): r""" + Attributes: name (str): Name of the peering, which should conform to RFC1035. + + This field is a member of `oneof`_ ``_name``. 
""" name = proto.Field(proto.STRING, number=3373707, optional=True,) @@ -31820,9 +36181,11 @@ class NetworksRemovePeeringRequest(proto.Message): class NetworksUpdatePeeringRequest(proto.Message): r""" + Attributes: network_peering (google.cloud.compute_v1.types.NetworkPeering): + This field is a member of `oneof`_ ``_network_peering``. """ network_peering = proto.Field( @@ -31841,34 +36204,50 @@ class NodeGroup(proto.Message): Attributes: autoscaling_policy (google.cloud.compute_v1.types.NodeGroupAutoscalingPolicy): Specifies how autoscaling should behave. + + This field is a member of `oneof`_ ``_autoscaling_policy``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] The type of the resource. Always compute#nodeGroup for node group. + + This field is a member of `oneof`_ ``_kind``. location_hint (str): An opaque location hint used to place the Node close to other resources. This field is for use by internal tools that use the public API. The location hint here on the NodeGroup overrides any location_hint present in the NodeTemplate. + + This field is a member of `oneof`_ ``_location_hint``. maintenance_policy (google.cloud.compute_v1.types.NodeGroup.MaintenancePolicy): Specifies how to handle instances when a node in the group undergoes maintenance. Set to one of: DEFAULT, RESTART_IN_PLACE, or MIGRATE_WITHIN_NODE_GROUP. The default value is DEFAULT. For more information, see Maintenance policies. + + This field is a member of `oneof`_ ``_maintenance_policy``. maintenance_window (google.cloud.compute_v1.types.NodeGroupMaintenanceWindow): + This field is a member of `oneof`_ ``_maintenance_window``. name (str): The name of the resource, provided by the client when initially creating the resource. The resource name must be @@ -31878,18 +36257,29 @@ class NodeGroup(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. node_template (str): URL of the node template to create the node group from. + + This field is a member of `oneof`_ ``_node_template``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. size (int): [Output Only] The total number of nodes in the node group. + + This field is a member of `oneof`_ ``_size``. status (google.cloud.compute_v1.types.NodeGroup.Status): + This field is a member of `oneof`_ ``_status``. zone (str): [Output Only] The name of the zone where the node group resides, such as us-central1-a. + + This field is a member of `oneof`_ ``_zone``. """ class MaintenancePolicy(proto.Enum): @@ -31943,16 +36333,21 @@ class Status(proto.Enum): class NodeGroupAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. 
items (Sequence[google.cloud.compute_v1.types.NodeGroupAggregatedList.ItemsEntry]): A list of NodeGroupsScopedList resources. kind (str): [Output Only] Type of resource.Always compute#nodeGroupAggregatedList for aggregated lists of node groups. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -31960,12 +36355,18 @@ class NodeGroupAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -31987,17 +36388,24 @@ def raw_page(self): class NodeGroupAutoscalingPolicy(proto.Message): r""" + Attributes: max_nodes (int): The maximum number of nodes that the group should have. Must be set if autoscaling is enabled. Maximum value allowed is 100. + + This field is a member of `oneof`_ ``_max_nodes``. min_nodes (int): The minimum number of nodes that the group should have. + + This field is a member of `oneof`_ ``_min_nodes``. mode (google.cloud.compute_v1.types.NodeGroupAutoscalingPolicy.Mode): The autoscaling mode. Set to one of: ON, OFF, or ONLY_SCALE_OUT. For more information, see Autoscaler modes. + + This field is a member of `oneof`_ ``_mode``. """ class Mode(proto.Enum): @@ -32017,15 +36425,20 @@ class Mode(proto.Enum): class NodeGroupList(proto.Message): r"""Contains a list of nodeGroups. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NodeGroup]): A list of NodeGroup resources. kind (str): [Output Only] Type of resource.Always compute#nodeGroupList for lists of node groups. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32033,10 +36446,16 @@ class NodeGroupList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32062,11 +36481,15 @@ class NodeGroupMaintenanceWindow(proto.Message): [Output only] A predetermined duration for the window, automatically chosen to be the smallest possible in the given scenario. + + This field is a member of `oneof`_ ``_maintenance_duration``. start_time (str): Start time of the window. This must be in UTC format that resolves to one of 00:00, 04:00, 08:00, 12:00, 16:00, or 20:00. For example, both 13:00-5 and 08:00 are valid. + + This field is a member of `oneof`_ ``_start_time``. 
""" maintenance_duration = proto.Field( @@ -32077,27 +36500,41 @@ class NodeGroupMaintenanceWindow(proto.Message): class NodeGroupNode(proto.Message): r""" + Attributes: accelerators (Sequence[google.cloud.compute_v1.types.AcceleratorConfig]): Accelerators for this node. cpu_overcommit_type (google.cloud.compute_v1.types.NodeGroupNode.CpuOvercommitType): CPU overcommit. + + This field is a member of `oneof`_ ``_cpu_overcommit_type``. disks (Sequence[google.cloud.compute_v1.types.LocalDisk]): Local disk configurations. instances (Sequence[str]): Instances scheduled on this node. name (str): The name of the node. + + This field is a member of `oneof`_ ``_name``. node_type (str): The type of this node. + + This field is a member of `oneof`_ ``_node_type``. satisfies_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. server_binding (google.cloud.compute_v1.types.ServerBinding): Binding properties for the physical server. + + This field is a member of `oneof`_ ``_server_binding``. server_id (str): Server ID associated with this node. + + This field is a member of `oneof`_ ``_server_id``. status (google.cloud.compute_v1.types.NodeGroupNode.Status): + This field is a member of `oneof`_ ``_status``. """ class CpuOvercommitType(proto.Enum): @@ -32136,10 +36573,13 @@ class Status(proto.Enum): class NodeGroupsAddNodesRequest(proto.Message): r""" + Attributes: additional_node_count (int): Count of additional nodes to be added to the node group. + + This field is a member of `oneof`_ ``_additional_node_count``. """ additional_node_count = proto.Field(proto.INT32, number=134997930, optional=True,) @@ -32147,6 +36587,7 @@ class NodeGroupsAddNodesRequest(proto.Message): class NodeGroupsDeleteNodesRequest(proto.Message): r""" + Attributes: nodes (Sequence[str]): Names of the nodes to delete. @@ -32157,16 +36598,21 @@ class NodeGroupsDeleteNodesRequest(proto.Message): class NodeGroupsListNodes(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NodeGroupNode]): A list of Node resources. kind (str): [Output Only] The resource type, which is always compute.nodeGroupsListNodes for the list of nodes in the specified node group. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32174,10 +36620,16 @@ class NodeGroupsListNodes(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32198,12 +36650,15 @@ def raw_page(self): class NodeGroupsScopedList(proto.Message): r""" + Attributes: node_groups (Sequence[google.cloud.compute_v1.types.NodeGroup]): [Output Only] A list of node groups contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] An informational warning that appears when the nodeGroup list is empty. + + This field is a member of `oneof`_ ``_warning``. 
""" node_groups = proto.RepeatedField( @@ -32216,10 +36671,13 @@ class NodeGroupsScopedList(proto.Message): class NodeGroupsSetNodeTemplateRequest(proto.Message): r""" + Attributes: node_template (str): Full or partial URL of the node template resource to be updated for this node group. + + This field is a member of `oneof`_ ``_node_template``. """ node_template = proto.Field(proto.STRING, number=323154455, optional=True,) @@ -32235,20 +36693,30 @@ class NodeTemplate(proto.Message): cpu_overcommit_type (google.cloud.compute_v1.types.NodeTemplate.CpuOvercommitType): CPU overcommit. + + This field is a member of `oneof`_ ``_cpu_overcommit_type``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. disks (Sequence[google.cloud.compute_v1.types.LocalDisk]): id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] The type of the resource. Always compute#nodeTemplate for node templates. + + This field is a member of `oneof`_ ``_kind``. name (str): The name of the resource, provided by the client when initially creating the resource. The resource name must be @@ -32258,23 +36726,33 @@ class NodeTemplate(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. node_affinity_labels (Sequence[google.cloud.compute_v1.types.NodeTemplate.NodeAffinityLabelsEntry]): Labels to use for node affinity, which will be used in instance scheduling. node_type (str): The node type to use for nodes group that are created from this template. + + This field is a member of `oneof`_ ``_node_type``. node_type_flexibility (google.cloud.compute_v1.types.NodeTemplateNodeTypeFlexibility): The flexible properties of the desired node type. Node groups that use this node template will create nodes of a type that matches these properties. This field is mutually exclusive with the node_type property; you can only define one or the other, but not both. + + This field is a member of `oneof`_ ``_node_type_flexibility``. region (str): [Output Only] The name of the region where the node template resides, such as us-central1. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. server_binding (google.cloud.compute_v1.types.ServerBinding): Sets the binding properties for the physical server. Valid values include: - *[Default]* RESTART_NODE_ON_ANY_SERVER: @@ -32282,12 +36760,18 @@ class NodeTemplate(proto.Message): RESTART_NODE_ON_MINIMAL_SERVER: Restarts VMs on the same physical server whenever possible See Sole-tenant node options for more information. + + This field is a member of `oneof`_ ``_server_binding``. status (google.cloud.compute_v1.types.NodeTemplate.Status): [Output Only] The status of the node template. One of the following values: CREATING, READY, and DELETING. + + This field is a member of `oneof`_ ``_status``. status_message (str): [Output Only] An optional, human-readable explanation of the status. 
+ + This field is a member of `oneof`_ ``_status_message``. """ class CpuOvercommitType(proto.Enum): @@ -32338,16 +36822,21 @@ class Status(proto.Enum): class NodeTemplateAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NodeTemplateAggregatedList.ItemsEntry]): A list of NodeTemplatesScopedList resources. kind (str): [Output Only] Type of resource.Always compute#nodeTemplateAggregatedList for aggregated lists of node templates. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32355,12 +36844,18 @@ class NodeTemplateAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32385,15 +36880,20 @@ def raw_page(self): class NodeTemplateList(proto.Message): r"""Contains a list of node templates. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NodeTemplate]): A list of NodeTemplate resources. kind (str): [Output Only] Type of resource.Always compute#nodeTemplateList for lists of node templates. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32401,10 +36901,16 @@ class NodeTemplateList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32425,13 +36931,17 @@ def raw_page(self): class NodeTemplateNodeTypeFlexibility(proto.Message): r""" + Attributes: cpus (str): + This field is a member of `oneof`_ ``_cpus``. local_ssd (str): + This field is a member of `oneof`_ ``_local_ssd``. memory (str): + This field is a member of `oneof`_ ``_memory``. """ cpus = proto.Field(proto.STRING, number=3060683, optional=True,) @@ -32441,6 +36951,7 @@ class NodeTemplateNodeTypeFlexibility(proto.Message): class NodeTemplatesScopedList(proto.Message): r""" + Attributes: node_templates (Sequence[google.cloud.compute_v1.types.NodeTemplate]): [Output Only] A list of node templates contained in this @@ -32448,6 +36959,8 @@ class NodeTemplatesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] An informational warning that appears when the node templates list is empty. 
+ + This field is a member of `oneof`_ ``_warning``. """ node_templates = proto.RepeatedField( @@ -32469,36 +36982,60 @@ class NodeType(proto.Message): Attributes: cpu_platform (str): [Output Only] The CPU platform used by this node type. + + This field is a member of `oneof`_ ``_cpu_platform``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. deprecated (google.cloud.compute_v1.types.DeprecationStatus): [Output Only] The deprecation status associated with this node type. + + This field is a member of `oneof`_ ``_deprecated``. description (str): [Output Only] An optional textual description of the resource. + + This field is a member of `oneof`_ ``_description``. guest_cpus (int): [Output Only] The number of virtual CPUs that are available to the node type. + + This field is a member of `oneof`_ ``_guest_cpus``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] The type of the resource. Always compute#nodeType for node types. + + This field is a member of `oneof`_ ``_kind``. local_ssd_gb (int): [Output Only] Local SSD available to the node type, defined in GB. + + This field is a member of `oneof`_ ``_local_ssd_gb``. memory_mb (int): [Output Only] The amount of physical memory available to the node type, defined in MB. + + This field is a member of `oneof`_ ``_memory_mb``. name (str): [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. zone (str): [Output Only] The name of the zone where the node type resides, such as us-central1-a. + + This field is a member of `oneof`_ ``_zone``. """ cpu_platform = proto.Field(proto.STRING, number=410285354, optional=True,) @@ -32519,16 +37056,21 @@ class NodeType(proto.Message): class NodeTypeAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NodeTypeAggregatedList.ItemsEntry]): A list of NodeTypesScopedList resources. kind (str): [Output Only] Type of resource.Always compute#nodeTypeAggregatedList for aggregated lists of node types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32536,12 +37078,18 @@ class NodeTypeAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32563,15 +37111,20 @@ def raw_page(self): class NodeTypeList(proto.Message): r"""Contains a list of node types. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. 
items (Sequence[google.cloud.compute_v1.types.NodeType]): A list of NodeType resources. kind (str): [Output Only] Type of resource.Always compute#nodeTypeList for lists of node types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32579,10 +37132,16 @@ class NodeTypeList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32601,12 +37160,15 @@ def raw_page(self): class NodeTypesScopedList(proto.Message): r""" + Attributes: node_types (Sequence[google.cloud.compute_v1.types.NodeType]): [Output Only] A list of node types contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] An informational warning that appears when the node types list is empty. + + This field is a member of `oneof`_ ``_warning``. """ node_types = proto.RepeatedField( @@ -32626,20 +37188,30 @@ class NotificationEndpoint(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. grpc_settings (google.cloud.compute_v1.types.NotificationEndpointGrpcSettings): Settings of the gRPC notification endpoint including the endpoint URL and the retry duration. + + This field is a member of `oneof`_ ``_grpc_settings``. id (int): [Output Only] A unique identifier for this resource type. The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#notificationEndpoint for notification endpoints. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -32649,14 +37221,20 @@ class NotificationEndpoint(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. region (str): [Output Only] URL of the region where the notification endpoint resides. This field applies only to the regional resource. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. """ creation_timestamp = proto.Field(proto.STRING, number=30525366, optional=True,) @@ -32685,12 +37263,18 @@ class NotificationEndpointGrpcSettings(proto.Message): set the authority header by the sender of notifications. See https://tools.ietf.org/html/rfc7540#section-8.1.2.3 + + This field is a member of `oneof`_ ``_authority``. 
endpoint (str): Endpoint to which gRPC notifications are sent. This must be a valid gRPCLB DNS name. + + This field is a member of `oneof`_ ``_endpoint``. payload_name (str): Optional. If specified, this field is used to populate the "name" field in gRPC requests. + + This field is a member of `oneof`_ ``_payload_name``. resend_interval (google.cloud.compute_v1.types.Duration): Optional. This field is used to configure how often to send a full update of all non-healthy @@ -32698,11 +37282,15 @@ class NotificationEndpointGrpcSettings(proto.Message): sent. If specified, must be in the range between 600 seconds to 3600 seconds. Nanos are disallowed. + + This field is a member of `oneof`_ ``_resend_interval``. retry_duration_sec (int): How much time (in seconds) is spent attempting notification retries until a successful response is received. Default is 30s. Limit is 20m (1200s). Must be a positive number. + + This field is a member of `oneof`_ ``_retry_duration_sec``. """ authority = proto.Field(proto.STRING, number=401868611, optional=True,) @@ -32716,15 +37304,20 @@ class NotificationEndpointGrpcSettings(proto.Message): class NotificationEndpointList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.NotificationEndpoint]): A list of NotificationEndpoint resources. kind (str): [Output Only] Type of the resource. Always compute#notificationEndpoint for notification endpoints. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32732,10 +37325,16 @@ class NotificationEndpointList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32772,43 +37371,69 @@ class Operation(proto.Message): client_operation_id (str): [Output Only] The value of ``requestId`` if you provided it in the request. Not present otherwise. + + This field is a member of `oneof`_ ``_client_operation_id``. creation_timestamp (str): [Deprecated] This field is deprecated. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): [Output Only] A textual description of the operation, which is set when the operation is created. + + This field is a member of `oneof`_ ``_description``. end_time (str): [Output Only] The time that this operation was completed. This value is in RFC3339 text format. + + This field is a member of `oneof`_ ``_end_time``. error (google.cloud.compute_v1.types.Error): [Output Only] If errors are generated during processing of the operation, this field will be populated. + + This field is a member of `oneof`_ ``_error``. http_error_message (str): [Output Only] If the operation fails, this field contains the HTTP error message that was returned, such as ``NOT FOUND``. + + This field is a member of `oneof`_ ``_http_error_message``. 
http_error_status_code (int): [Output Only] If the operation fails, this field contains the HTTP error status code that was returned. For example, a ``404`` means the resource was not found. + + This field is a member of `oneof`_ ``_http_error_status_code``. id (int): [Output Only] The unique identifier for the operation. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. insert_time (str): [Output Only] The time that this operation was requested. This value is in RFC3339 text format. + + This field is a member of `oneof`_ ``_insert_time``. kind (str): [Output Only] Type of the resource. Always ``compute#operation`` for Operation resources. + + This field is a member of `oneof`_ ``_kind``. name (str): [Output Only] Name of the operation. + + This field is a member of `oneof`_ ``_name``. operation_group_id (str): [Output Only] An ID that represents a group of operations, such as when a group of operations results from a ``bulkInsert`` API request. + + This field is a member of `oneof`_ ``_operation_group_id``. operation_type (str): [Output Only] The type of operation, such as ``insert``, ``update``, or ``delete``, and so on. + + This field is a member of `oneof`_ ``_operation_type``. progress (int): [Output Only] An optional progress indicator that ranges from 0 to 100. There is no requirement that this be linear @@ -32816,32 +37441,50 @@ class Operation(proto.Message): used to guess when the operation will be complete. This number should monotonically increase as the operation progresses. + + This field is a member of `oneof`_ ``_progress``. region (str): [Output Only] The URL of the region where the operation resides. Only applicable when performing regional operations. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. start_time (str): [Output Only] The time that this operation was started by the server. This value is in RFC3339 text format. + + This field is a member of `oneof`_ ``_start_time``. status (google.cloud.compute_v1.types.Operation.Status): [Output Only] The status of the operation, which can be one of the following: ``PENDING``, ``RUNNING``, or ``DONE``. + + This field is a member of `oneof`_ ``_status``. status_message (str): [Output Only] An optional textual description of the current status of the operation. + + This field is a member of `oneof`_ ``_status_message``. target_id (int): [Output Only] The unique target ID, which identifies a specific incarnation of the target resource. + + This field is a member of `oneof`_ ``_target_id``. target_link (str): [Output Only] The URL of the resource that the operation modifies. For operations related to creating a snapshot, this points to the persistent disk that the snapshot was created from. + + This field is a member of `oneof`_ ``_target_link``. user (str): [Output Only] User who requested the operation, for example: ``user@example.com``. + + This field is a member of `oneof`_ ``_user``. warnings (Sequence[google.cloud.compute_v1.types.Warnings]): [Output Only] If warning messages are generated during processing of the operation, this field will be populated. @@ -32849,6 +37492,8 @@ class Operation(proto.Message): [Output Only] The URL of the zone where the operation resides. Only applicable when performing per-zone operations. + + This field is a member of `oneof`_ ``_zone``. 
""" class Status(proto.Enum): @@ -32888,16 +37533,21 @@ class Status(proto.Enum): class OperationAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.OperationAggregatedList.ItemsEntry]): [Output Only] A map of scoped operation lists. kind (str): [Output Only] Type of resource. Always ``compute#operationAggregatedList`` for aggregated lists of operations. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32905,12 +37555,18 @@ class OperationAggregatedList(proto.Message): value for the query parameter ``pageToken`` in the next list request. Subsequent list requests will have their own ``nextPageToken`` to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32932,15 +37588,20 @@ def raw_page(self): class OperationList(proto.Message): r"""Contains a list of Operation resources. + Attributes: id (str): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Operation]): [Output Only] A list of Operation resources. kind (str): [Output Only] Type of resource. Always ``compute#operations`` for Operations resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -32948,10 +37609,16 @@ class OperationList(proto.Message): value for the query parameter ``pageToken`` in the next list request. Subsequent list requests will have their own ``nextPageToken`` to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -32970,12 +37637,15 @@ def raw_page(self): class OperationsScopedList(proto.Message): r""" + Attributes: operations (Sequence[google.cloud.compute_v1.types.Operation]): [Output Only] A list of operations contained in this scope. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of operations when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ operations = proto.RepeatedField( @@ -32996,44 +37666,60 @@ class OutlierDetection(proto.Message): real ejection time is equal to the base ejection time multiplied by the number of times the host has been ejected. Defaults to 30000ms or 30s. + + This field is a member of `oneof`_ ``_base_ejection_time``. consecutive_errors (int): Number of errors before a host is ejected from the connection pool. 
When the backend host is accessed over HTTP, a 5xx return code qualifies as an error. Defaults to 5. + + This field is a member of `oneof`_ ``_consecutive_errors``. consecutive_gateway_failure (int): The number of consecutive gateway failures (502, 503, 504 status or connection errors that are mapped to one of those status codes) before a consecutive gateway failure ejection occurs. Defaults to 3. + + This field is a member of `oneof`_ ``_consecutive_gateway_failure``. enforcing_consecutive_errors (int): The percentage chance that a host will be actually ejected when an outlier status is detected through consecutive 5xx. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 0. + + This field is a member of `oneof`_ ``_enforcing_consecutive_errors``. enforcing_consecutive_gateway_failure (int): The percentage chance that a host will be actually ejected when an outlier status is detected through consecutive gateway failures. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 100. + + This field is a member of `oneof`_ ``_enforcing_consecutive_gateway_failure``. enforcing_success_rate (int): The percentage chance that a host will be actually ejected when an outlier status is detected through success rate statistics. This setting can be used to disable ejection or to ramp it up slowly. Defaults to 100. + + This field is a member of `oneof`_ ``_enforcing_success_rate``. interval (google.cloud.compute_v1.types.Duration): Time interval between ejection analysis sweeps. This can result in both new ejections as well as hosts being returned to service. Defaults to 1 second. + + This field is a member of `oneof`_ ``_interval``. max_ejection_percent (int): Maximum percentage of hosts in the load balancing pool for the backend service that can be ejected. Defaults to 50%. + + This field is a member of `oneof`_ ``_max_ejection_percent``. success_rate_minimum_hosts (int): The number of hosts in a cluster that must have enough request volume to detect success @@ -33041,6 +37727,8 @@ class OutlierDetection(proto.Message): than this setting, outlier detection via success rate statistics is not performed for any host in the cluster. Defaults to 5. + + This field is a member of `oneof`_ ``_success_rate_minimum_hosts``. success_rate_request_volume (int): The minimum number of total requests that must be collected in one interval (as defined by @@ -33049,6 +37737,8 @@ class OutlierDetection(proto.Message): the volume is lower than this setting, outlier detection via success rate statistics is not performed for that host. Defaults to 100. + + This field is a member of `oneof`_ ``_success_rate_request_volume``. success_rate_stdev_factor (int): This factor is used to determine the ejection threshold for success rate outlier ejection. The ejection threshold is the @@ -33058,6 +37748,8 @@ class OutlierDetection(proto.Message): factor is divided by a thousand to get a double. That is, if the desired factor is 1.9, the runtime value should be 1900. Defaults to 1900. + + This field is a member of `oneof`_ ``_success_rate_stdev_factor``. """ base_ejection_time = proto.Field( @@ -33104,32 +37796,48 @@ class PacketMirroring(proto.Message): as collector for mirrored traffic. The specified forwarding rule must have isMirroringCollector set to true. + + This field is a member of `oneof`_ ``_collector_ilb``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. 
description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. enable (google.cloud.compute_v1.types.PacketMirroring.Enable): Indicates whether or not this packet mirroring takes effect. If set to FALSE, this packet mirroring policy will not be enforced on the network. The default is TRUE. + + This field is a member of `oneof`_ ``_enable``. filter (google.cloud.compute_v1.types.PacketMirroringFilter): Filter for mirrored traffic. If unspecified, all traffic is mirrored. + + This field is a member of `oneof`_ ``_filter``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#packetMirroring for packet mirrorings. + + This field is a member of `oneof`_ ``_kind``. mirrored_resources (google.cloud.compute_v1.types.PacketMirroringMirroredResourceInfo): PacketMirroring mirroredResourceInfos. MirroredResourceInfo specifies a set of mirrored VM instances, subnetworks and/or tags for which traffic from/to all VM instances will be mirrored. + + This field is a member of `oneof`_ ``_mirrored_resources``. name (str): Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, @@ -33139,12 +37847,16 @@ class PacketMirroring(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network (google.cloud.compute_v1.types.PacketMirroringNetworkInfo): Specifies the mirrored VPC network. Only packets in this network will be mirrored. All mirrored VMs should have a NIC in the given network. All mirrored subnetworks should belong to the given network. + + This field is a member of `oneof`_ ``_network``. priority (int): The priority of applying this configuration. Priority is used to break ties in cases where @@ -33153,11 +37865,17 @@ class PacketMirroring(proto.Message): Instance, the one with the lowest-numbered priority value wins. Default value is 1000. Valid range is 0 through 65535. + + This field is a member of `oneof`_ ``_priority``. region (str): [Output Only] URI of the region where the packetMirroring resides. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. """ class Enable(proto.Enum): @@ -33203,14 +37921,19 @@ class Enable(proto.Enum): class PacketMirroringAggregatedList(proto.Message): r"""Contains a list of packetMirrorings. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.PacketMirroringAggregatedList.ItemsEntry]): A list of PacketMirroring resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -33218,12 +37941,18 @@ class PacketMirroringAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -33248,6 +37977,7 @@ def raw_page(self): class PacketMirroringFilter(proto.Message): r""" + Attributes: I_p_protocols (Sequence[str]): Protocols that apply as filter on mirrored @@ -33267,6 +37997,8 @@ class PacketMirroringFilter(proto.Message): direction (google.cloud.compute_v1.types.PacketMirroringFilter.Direction): Direction of traffic to mirror, either INGRESS, EGRESS, or BOTH. The default is BOTH. + + This field is a member of `oneof`_ ``_direction``. """ class Direction(proto.Enum): @@ -33287,14 +38019,19 @@ class Direction(proto.Enum): class PacketMirroringForwardingRuleInfo(proto.Message): r""" + Attributes: canonical_url (str): [Output Only] Unique identifier for the forwarding rule; defined by the server. + + This field is a member of `oneof`_ ``_canonical_url``. url (str): Resource URL to the forwarding rule representing the ILB configured as destination of the mirrored traffic. + + This field is a member of `oneof`_ ``_url``. """ canonical_url = proto.Field(proto.STRING, number=512294820, optional=True,) @@ -33303,15 +38040,20 @@ class PacketMirroringForwardingRuleInfo(proto.Message): class PacketMirroringList(proto.Message): r"""Contains a list of PacketMirroring resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.PacketMirroring]): A list of PacketMirroring resources. kind (str): [Output Only] Type of resource. Always compute#packetMirroring for packetMirrorings. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -33319,10 +38061,16 @@ class PacketMirroringList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -33343,6 +38091,7 @@ def raw_page(self): class PacketMirroringMirroredResourceInfo(proto.Message): r""" + Attributes: instances (Sequence[google.cloud.compute_v1.types.PacketMirroringMirroredResourceInfoInstanceInfo]): A set of virtual machine instances that are @@ -33380,13 +38129,18 @@ class PacketMirroringMirroredResourceInfo(proto.Message): class PacketMirroringMirroredResourceInfoInstanceInfo(proto.Message): r""" + Attributes: canonical_url (str): [Output Only] Unique identifier for the instance; defined by the server. + + This field is a member of `oneof`_ ``_canonical_url``. url (str): Resource URL to the virtual machine instance which is being mirrored. + + This field is a member of `oneof`_ ``_url``. 
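The nextPageToken fields called out in the list-message docstrings above are normally consumed through the generated pagers rather than by hand; the pager requests further pages transparently. A hedged sketch, with placeholder project and region names:

    from google.cloud import compute_v1

    client = compute_v1.PacketMirroringsClient()

    # The pager follows next_page_token for us, yielding every PacketMirroring
    # across however many pages the API returns.
    for mirroring in client.list(project="my-project", region="us-central1"):
        print(mirroring.name, mirroring.enable)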
""" canonical_url = proto.Field(proto.STRING, number=512294820, optional=True,) @@ -33395,14 +38149,19 @@ class PacketMirroringMirroredResourceInfoInstanceInfo(proto.Message): class PacketMirroringMirroredResourceInfoSubnetInfo(proto.Message): r""" + Attributes: canonical_url (str): [Output Only] Unique identifier for the subnetwork; defined by the server. + + This field is a member of `oneof`_ ``_canonical_url``. url (str): Resource URL to the subnetwork for which traffic from/to all VM instances will be mirrored. + + This field is a member of `oneof`_ ``_url``. """ canonical_url = proto.Field(proto.STRING, number=512294820, optional=True,) @@ -33411,12 +38170,17 @@ class PacketMirroringMirroredResourceInfoSubnetInfo(proto.Message): class PacketMirroringNetworkInfo(proto.Message): r""" + Attributes: canonical_url (str): [Output Only] Unique identifier for the network; defined by the server. + + This field is a member of `oneof`_ ``_canonical_url``. url (str): URL of the network resource. + + This field is a member of `oneof`_ ``_url``. """ canonical_url = proto.Field(proto.STRING, number=512294820, optional=True,) @@ -33425,6 +38189,7 @@ class PacketMirroringNetworkInfo(proto.Message): class PacketMirroringsScopedList(proto.Message): r""" + Attributes: packet_mirrorings (Sequence[google.cloud.compute_v1.types.PacketMirroring]): A list of packetMirrorings contained in this @@ -33432,6 +38197,8 @@ class PacketMirroringsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of packetMirrorings when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ packet_mirrorings = proto.RepeatedField( @@ -33449,6 +38216,8 @@ class PatchAutoscalerRequest(proto.Message): Attributes: autoscaler (str): Name of the autoscaler to patch. + + This field is a member of `oneof`_ ``_autoscaler``. autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): The body resource for this request project (str): @@ -33469,6 +38238,8 @@ class PatchAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): Name of the zone for this request. """ @@ -33509,6 +38280,8 @@ class PatchBackendBucketRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_bucket = proto.Field(proto.STRING, number=91714037,) @@ -33546,6 +38319,8 @@ class PatchBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service = proto.Field(proto.STRING, number=306946058,) @@ -33581,6 +38356,8 @@ class PatchFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -33617,6 +38394,8 @@ class PatchFirewallRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
""" firewall = proto.Field(proto.STRING, number=511016192,) @@ -33654,6 +38433,8 @@ class PatchForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ forwarding_rule = proto.Field(proto.STRING, number=269964030,) @@ -33692,6 +38473,8 @@ class PatchGlobalForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ forwarding_rule = proto.Field(proto.STRING, number=269964030,) @@ -33728,6 +38511,8 @@ class PatchGlobalPublicDelegatedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -33765,6 +38550,8 @@ class PatchHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check = proto.Field(proto.STRING, number=308876645,) @@ -33802,6 +38589,8 @@ class PatchImageRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ image = proto.Field(proto.STRING, number=100313435,) @@ -33837,6 +38626,8 @@ class PatchInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where you want to create the managed instance group. @@ -33880,6 +38671,8 @@ class PatchInterconnectAttachmentRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ interconnect_attachment = proto.Field(proto.STRING, number=308135284,) @@ -33918,6 +38711,8 @@ class PatchInterconnectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ interconnect = proto.Field(proto.STRING, number=224601230,) @@ -33955,6 +38750,8 @@ class PatchNetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network = proto.Field(proto.STRING, number=232872494,) @@ -33990,6 +38787,8 @@ class PatchNodeGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -34033,6 +38832,8 @@ class PatchPacketMirroringRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
""" packet_mirroring = proto.Field(proto.STRING, number=22305996,) @@ -34073,6 +38874,8 @@ class PatchPerInstanceConfigsInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. It should conform to @@ -34122,6 +38925,8 @@ class PatchPerInstanceConfigsRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -34161,6 +38966,8 @@ class PatchPublicAdvertisedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -34199,6 +39006,8 @@ class PatchPublicDelegatedPrefixeRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -34217,6 +39026,8 @@ class PatchRegionAutoscalerRequest(proto.Message): Attributes: autoscaler (str): Name of the autoscaler to patch. + + This field is a member of `oneof`_ ``_autoscaler``. autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): The body resource for this request project (str): @@ -34239,6 +39050,8 @@ class PatchRegionAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ autoscaler = proto.Field(proto.STRING, number=517258967, optional=True,) @@ -34279,6 +39092,8 @@ class PatchRegionBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service = proto.Field(proto.STRING, number=306946058,) @@ -34319,6 +39134,8 @@ class PatchRegionHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check = proto.Field(proto.STRING, number=308876645,) @@ -34361,6 +39178,8 @@ class PatchRegionHealthCheckServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check_service = proto.Field(proto.STRING, number=408374747,) @@ -34401,6 +39220,8 @@ class PatchRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -34424,6 +39245,8 @@ class PatchRegionUrlMapRequest(proto.Message): request_id (str): begin_interface: MixerMutationRequestBuilder Request ID to support idempotency. + + This field is a member of `oneof`_ ``_request_id``. 
url_map (str): Name of the UrlMap resource to patch. url_map_resource (google.cloud.compute_v1.types.UrlMap): @@ -34462,6 +39285,8 @@ class PatchRouterRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. router (str): Name of the Router resource to patch. router_resource (google.cloud.compute_v1.types.Router): @@ -34486,6 +39311,8 @@ class PatchRuleFirewallPolicyRequest(proto.Message): The body resource for this request priority (int): The priority of the rule to patch. + + This field is a member of `oneof`_ ``_priority``. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -34502,6 +39329,8 @@ class PatchRuleFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -34519,6 +39348,8 @@ class PatchRuleSecurityPolicyRequest(proto.Message): Attributes: priority (int): The priority of the rule to patch. + + This field is a member of `oneof`_ ``_priority``. project (str): Project ID for this request. security_policy (str): @@ -34558,6 +39389,8 @@ class PatchSecurityPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. security_policy (str): Name of the security policy to update. security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): @@ -34596,6 +39429,8 @@ class PatchServiceAttachmentRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. service_attachment (str): The resource id of the ServiceAttachment to patch. It should conform to RFC1035 resource @@ -34637,6 +39472,8 @@ class PatchSslPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. ssl_policy (str): Name of the SSL policy to update. The name must be 1-63 characters long, and comply with @@ -34669,6 +39506,8 @@ class PatchSubnetworkRequest(proto.Message): setting the role to ACTIVE. Note that after this patch operation the roles of the ACTIVE and BACKUP subnetworks will be swapped. + + This field is a member of `oneof`_ ``_drain_timeout_seconds``. project (str): Project ID for this request. region (str): @@ -34689,6 +39528,8 @@ class PatchSubnetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. subnetwork (str): Name of the Subnetwork resource to patch. subnetwork_resource (google.cloud.compute_v1.types.Subnetwork): @@ -34726,6 +39567,8 @@ class PatchTargetGrpcProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_grpc_proxy (str): Name of the TargetGrpcProxy resource to patch. 
@@ -34762,6 +39605,8 @@ class PatchTargetHttpProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_http_proxy (str): Name of the TargetHttpProxy resource to patch. @@ -34798,6 +39643,8 @@ class PatchTargetHttpsProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_https_proxy (str): Name of the TargetHttpsProxy resource to patch. @@ -34836,6 +39683,8 @@ class PatchUrlMapRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. url_map (str): Name of the UrlMap resource to patch. url_map_resource (google.cloud.compute_v1.types.UrlMap): @@ -34869,6 +39718,8 @@ class PathMatcher(proto.Message): set. UrlMaps for external HTTP(S) load balancers support only the urlRewrite action within a pathMatcher's defaultRouteAction. + + This field is a member of `oneof`_ ``_default_route_action``. default_service (str): The full or partial URL to the BackendService resource. This will be used if none of the pathRules or routeRules defined @@ -34890,6 +39741,8 @@ class PathMatcher(proto.Message): Authorization requires one or more of the following Google IAM permissions on the specified resource default_service: - compute.backendBuckets.use - compute.backendServices.use + + This field is a member of `oneof`_ ``_default_service``. default_url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): When none of the specified pathRules or routeRules match, the request is redirected to a @@ -34898,10 +39751,14 @@ class PathMatcher(proto.Message): or defaultRouteAction must not be set. Not supported when the URL map is bound to target gRPC proxy. + + This field is a member of `oneof`_ ``_default_url_redirect``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. header_action (google.cloud.compute_v1.types.HttpHeaderAction): Specifies changes to request and response headers that need to take effect for the @@ -34914,9 +39771,13 @@ class PathMatcher(proto.Message): EXTERNAL. Not supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_header_action``. name (str): The name to which this PathMatcher is referred by the HostRule. + + This field is a member of `oneof`_ ``_name``. path_rules (Sequence[google.cloud.compute_v1.types.PathRule]): The list of path rules. Use this list instead of routeRules when routing based on simple path matching is all that's @@ -34982,6 +39843,8 @@ class PathRule(proto.Message): UrlMaps for external HTTP(S) load balancers support only the urlRewrite action within a pathRule's routeAction. + + This field is a member of `oneof`_ ``_route_action``. service (str): The full or partial URL of the backend service resource to which traffic is directed if @@ -34995,12 +39858,16 @@ class PathRule(proto.Message): weightedBackendServices, service must not be specified. Only one of urlRedirect, service or routeAction.weightedBackendService must be set. + + This field is a member of `oneof`_ ``_service``. 
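PathMatcher and PathRule compose inside a UrlMap: a host rule selects a path matcher by name, and the matcher's path rules (or its default service) pick the backend. A hedged sketch with placeholder names and partial backend-service URLs:

    from google.cloud import compute_v1

    url_map = compute_v1.UrlMap(
        name="web-map",
        default_service="global/backendServices/web-default",
        host_rules=[
            compute_v1.HostRule(hosts=["example.com"], path_matcher="api"),
        ],
        path_matchers=[
            compute_v1.PathMatcher(
                name="api",
                default_service="global/backendServices/web-default",
                path_rules=[
                    compute_v1.PathRule(
                        paths=["/api/*"],
                        service="global/backendServices/api-backend",
                    ),
                ],
            ),
        ],
    )

Only one of service, urlRedirect, or a routeAction with weightedBackendServices may be set per rule, which is the constraint quoted in the PathRule docstring above.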
url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If urlRedirect is specified, service or routeAction must not be set. Not supported when the URL map is bound to target gRPC proxy. + + This field is a member of `oneof`_ ``_url_redirect``. """ paths = proto.RepeatedField(proto.STRING, number=106438894,) @@ -35015,6 +39882,7 @@ class PathRule(proto.Message): class PerInstanceConfig(proto.Message): r""" + Attributes: fingerprint (str): Fingerprint of this per-instance config. This @@ -35023,6 +39891,8 @@ class PerInstanceConfig(proto.Message): up-to-date fingerprint must be provided in order to update an existing per-instance config or the field needs to be unset. + + This field is a member of `oneof`_ ``_fingerprint``. name (str): The name of a per-instance config and its corresponding instance. Serves as a merge key @@ -35034,13 +39904,19 @@ class PerInstanceConfig(proto.Message): config for a VM instance that either doesn't exist or is not part of the group will result in an error. + + This field is a member of `oneof`_ ``_name``. preserved_state (google.cloud.compute_v1.types.PreservedState): The intended preserved state for the given instance. Does not contain preserved state generated from a stateful policy. + + This field is a member of `oneof`_ ``_preserved_state``. status (google.cloud.compute_v1.types.PerInstanceConfig.Status): The status of applying this per-instance config on the corresponding managed instance. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -35124,9 +40000,13 @@ class Policy(proto.Message): you omit this field, then IAM allows you to overwrite a version ``3`` policy with a version ``1`` policy, and all of the conditions in the version ``3`` policy are lost. + + This field is a member of `oneof`_ ``_etag``. iam_owned (bool): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_iam_owned``. rules (Sequence[google.cloud.compute_v1.types.Rule]): This is deprecated and has no effect. Do not use. @@ -35151,6 +40031,8 @@ class Policy(proto.Message): resources support conditions in their IAM policies, see the `IAM documentation `__. + + This field is a member of `oneof`_ ``_version``. """ audit_configs = proto.RepeatedField( @@ -35165,6 +40047,7 @@ class Policy(proto.Message): class PreconfiguredWafSet(proto.Message): r""" + Attributes: expression_sets (Sequence[google.cloud.compute_v1.types.WafExpressionSet]): List of entities that are currently supported @@ -35178,6 +40061,7 @@ class PreconfiguredWafSet(proto.Message): class PreservedState(proto.Message): r"""Preserved state for a given instance. + Attributes: disks (Sequence[google.cloud.compute_v1.types.PreservedState.DisksEntry]): Preserved disks defined for this instance. @@ -35198,6 +40082,7 @@ class PreservedState(proto.Message): class PreservedStatePreservedDisk(proto.Message): r""" + Attributes: auto_delete (google.cloud.compute_v1.types.PreservedStatePreservedDisk.AutoDelete): These stateful disks will never be deleted during @@ -35206,13 +40091,19 @@ class PreservedStatePreservedDisk(proto.Message): is no longer used by the group, e.g. when the given instance or the whole MIG is deleted. Note: disks attached in READ_ONLY mode cannot be auto-deleted. + + This field is a member of `oneof`_ ``_auto_delete``. 
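A PerInstanceConfig ties a named instance in a managed instance group to preserved state such as stateful disks. A hedged sketch of the message shapes documented above (the disk URL is a placeholder and the enum member names are assumptions based on the docstrings):

    from google.cloud import compute_v1

    config = compute_v1.PerInstanceConfig(
        # Must match the name of an instance that already belongs to the group.
        name="instance-1",
        preserved_state=compute_v1.PreservedState(
            disks={
                "data-disk": compute_v1.PreservedStatePreservedDisk(
                    source="zones/us-central1-a/disks/data-disk-1",  # placeholder
                    mode="READ_WRITE",      # assumed enum member name
                    auto_delete="NEVER",    # assumed enum member name
                ),
            },
        ),
    )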
mode (google.cloud.compute_v1.types.PreservedStatePreservedDisk.Mode): The mode in which to attach this disk, either READ_WRITE or READ_ONLY. If not specified, the default is to attach the disk in READ_WRITE mode. + + This field is a member of `oneof`_ ``_mode``. source (str): The URL of the disk resource that is stateful and should be attached to the VM instance. + + This field is a member of `oneof`_ ``_source``. """ class AutoDelete(proto.Enum): @@ -35273,20 +40164,30 @@ class Project(proto.Message): Metadata key/value pairs available to all instances contained in this project. See Custom metadata for more information. + + This field is a member of `oneof`_ ``_common_instance_metadata``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. default_network_tier (google.cloud.compute_v1.types.Project.DefaultNetworkTier): This signifies the default network tier used for configuring resources of the project and can only take the following values: PREMIUM, STANDARD. Initially the default network tier is PREMIUM. + + This field is a member of `oneof`_ ``_default_network_tier``. default_service_account (str): [Output Only] Default service account used by VMs running in this project. + + This field is a member of `oneof`_ ``_default_service_account``. description (str): An optional textual description of the resource. + + This field is a member of `oneof`_ ``_description``. enabled_features (Sequence[str]): Restricted features enabled for use on this project. @@ -35295,25 +40196,37 @@ class Project(proto.Message): identifier is defined by the server. This is *not* the project ID, and is just a unique ID used by Compute Engine to identify resources. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#project for projects. + + This field is a member of `oneof`_ ``_kind``. name (str): The project ID. For example: my-example- roject. Use the project ID to make requests to Compute Engine. + + This field is a member of `oneof`_ ``_name``. quotas (Sequence[google.cloud.compute_v1.types.Quota]): [Output Only] Quotas assigned to this project. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. usage_export_location (google.cloud.compute_v1.types.UsageExportLocation): The naming prefix for daily usage reports and the Google Cloud Storage bucket where they are stored. + + This field is a member of `oneof`_ ``_usage_export_location``. xpn_project_status (google.cloud.compute_v1.types.Project.XpnProjectStatus): [Output Only] The role this project has in a shared VPC configuration. Currently, only projects with the host role, which is specified by the value HOST, are differentiated. + + This field is a member of `oneof`_ ``_xpn_project_status``. """ class DefaultNetworkTier(proto.Enum): @@ -35362,9 +40275,12 @@ class XpnProjectStatus(proto.Enum): class ProjectsDisableXpnResourceRequest(proto.Message): r""" + Attributes: xpn_resource (google.cloud.compute_v1.types.XpnResourceId): Service resource (a.k.a service project) ID. + + This field is a member of `oneof`_ ``_xpn_resource``. """ xpn_resource = proto.Field( @@ -35374,9 +40290,12 @@ class ProjectsDisableXpnResourceRequest(proto.Message): class ProjectsEnableXpnResourceRequest(proto.Message): r""" + Attributes: xpn_resource (google.cloud.compute_v1.types.XpnResourceId): Service resource (a.k.a service project) ID. 
+ + This field is a member of `oneof`_ ``_xpn_resource``. """ xpn_resource = proto.Field( @@ -35386,11 +40305,14 @@ class ProjectsEnableXpnResourceRequest(proto.Message): class ProjectsGetXpnResources(proto.Message): r""" + Attributes: kind (str): [Output Only] Type of resource. Always compute#projectsGetXpnResources for lists of service resources (a.k.a service projects) + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -35398,6 +40320,8 @@ class ProjectsGetXpnResources(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. resources (Sequence[google.cloud.compute_v1.types.XpnResourceId]): Service resources (a.k.a service projects) attached to this project as their shared VPC @@ -35417,12 +40341,15 @@ def raw_page(self): class ProjectsListXpnHostsRequest(proto.Message): r""" + Attributes: organization (str): Optional organization ID managed by Cloud Resource Manager, for which to list shared VPC host projects. If not specified, the organization will be inferred from the project. + + This field is a member of `oneof`_ ``_organization``. """ organization = proto.Field(proto.STRING, number=105180467, optional=True,) @@ -35430,9 +40357,12 @@ class ProjectsListXpnHostsRequest(proto.Message): class ProjectsSetDefaultNetworkTierRequest(proto.Message): r""" + Attributes: network_tier (google.cloud.compute_v1.types.ProjectsSetDefaultNetworkTierRequest.NetworkTier): Default network tier to be set. + + This field is a member of `oneof`_ ``_network_tier``. """ class NetworkTier(proto.Enum): @@ -35455,13 +40385,19 @@ class PublicAdvertisedPrefix(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. dns_verification_ip (str): The IPv4 address to be used for reverse DNS verification. + + This field is a member of `oneof`_ ``_dns_verification_ip``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -35474,16 +40410,24 @@ class PublicAdvertisedPrefix(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a PublicAdvertisedPrefix. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource type. The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. ip_cidr_range (str): The IPv4 address range, in CIDR format, represented by this public advertised prefix. + + This field is a member of `oneof`_ ``_ip_cidr_range``. kind (str): [Output Only] Type of the resource. Always compute#publicAdvertisedPrefix for public advertised prefixes. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. 
The name must be 1-63 characters long, @@ -35493,16 +40437,24 @@ class PublicAdvertisedPrefix(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. public_delegated_prefixs (Sequence[google.cloud.compute_v1.types.PublicAdvertisedPrefixPublicDelegatedPrefix]): [Output Only] The list of public delegated prefixes that exist for this public advertised prefix. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. shared_secret (str): [Output Only] The shared secret to be used for reverse DNS verification. + + This field is a member of `oneof`_ ``_shared_secret``. status (google.cloud.compute_v1.types.PublicAdvertisedPrefix.Status): The status of the public advertised prefix. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -35536,16 +40488,21 @@ class Status(proto.Enum): class PublicAdvertisedPrefixList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.PublicAdvertisedPrefix]): A list of PublicAdvertisedPrefix resources. kind (str): [Output Only] Type of the resource. Always compute#publicAdvertisedPrefix for public advertised prefixes. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -35553,10 +40510,16 @@ class PublicAdvertisedPrefixList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -35583,20 +40546,30 @@ class PublicAdvertisedPrefixPublicDelegatedPrefix(proto.Message): ip_range (str): The IP address range of the public delegated prefix + + This field is a member of `oneof`_ ``_ip_range``. name (str): The name of the public delegated prefix + + This field is a member of `oneof`_ ``_name``. project (str): The project number of the public delegated prefix + + This field is a member of `oneof`_ ``_project``. region (str): The region of the public delegated prefix if it is regional. If absent, the prefix is global. + + This field is a member of `oneof`_ ``_region``. status (str): The status of the public delegated prefix. Possible values are: INITIALIZING: The public delegated prefix is being initialized and addresses cannot be created yet. ANNOUNCED: The public delegated prefix is active. + + This field is a member of `oneof`_ ``_status``. """ ip_range = proto.Field(proto.STRING, number=145092645, optional=True,) @@ -35617,10 +40590,14 @@ class PublicDelegatedPrefix(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. 
+ + This field is a member of `oneof`_ ``_description``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -35632,17 +40609,27 @@ class PublicDelegatedPrefix(proto.Message): will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a PublicDelegatedPrefix. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource type. The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. ip_cidr_range (str): The IPv4 address range, in CIDR format, represented by this public delegated prefix. + + This field is a member of `oneof`_ ``_ip_cidr_range``. is_live_migration (bool): If true, the prefix will be live migrated. + + This field is a member of `oneof`_ ``_is_live_migration``. kind (str): [Output Only] Type of the resource. Always compute#publicDelegatedPrefix for public delegated prefixes. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -35652,9 +40639,13 @@ class PublicDelegatedPrefix(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. parent_prefix (str): The URL of parent prefix. Either PublicAdvertisedPrefix or PublicDelegatedPrefix. + + This field is a member of `oneof`_ ``_parent_prefix``. public_delegated_sub_prefixs (Sequence[google.cloud.compute_v1.types.PublicDelegatedPrefixPublicDelegatedSubPrefix]): The list of sub public delegated prefixes that exist for this public delegated prefix. @@ -35664,10 +40655,16 @@ class PublicDelegatedPrefix(proto.Message): resource. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. status (google.cloud.compute_v1.types.PublicDelegatedPrefix.Status): [Output Only] The status of the public delegated prefix. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -35699,10 +40696,13 @@ class Status(proto.Enum): class PublicDelegatedPrefixAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.PublicDelegatedPrefixAggregatedList.ItemsEntry]): A list of PublicDelegatedPrefixesScopedList resources. @@ -35710,6 +40710,8 @@ class PublicDelegatedPrefixAggregatedList(proto.Message): [Output Only] Type of the resource. Always compute#publicDelegatedPrefixAggregatedList for aggregated lists of public delegated prefixes. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -35717,12 +40719,18 @@ class PublicDelegatedPrefixAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -35747,16 +40755,21 @@ def raw_page(self): class PublicDelegatedPrefixList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.PublicDelegatedPrefix]): A list of PublicDelegatedPrefix resources. kind (str): [Output Only] Type of the resource. Always compute#publicDelegatedPrefixList for public delegated prefixes. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -35764,10 +40777,16 @@ class PublicDelegatedPrefixList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -35788,27 +40807,42 @@ def raw_page(self): class PublicDelegatedPrefixPublicDelegatedSubPrefix(proto.Message): r"""Represents a sub PublicDelegatedPrefix. + Attributes: delegatee_project (str): Name of the project scoping this PublicDelegatedSubPrefix. + + This field is a member of `oneof`_ ``_delegatee_project``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. ip_cidr_range (str): The IPv4 address range, in CIDR format, represented by this sub public delegated prefix. + + This field is a member of `oneof`_ ``_ip_cidr_range``. is_address (bool): Whether the sub prefix is delegated to create Address resources in the delegatee project. + + This field is a member of `oneof`_ ``_is_address``. name (str): The name of the sub public delegated prefix. + + This field is a member of `oneof`_ ``_name``. region (str): [Output Only] The region of the sub public delegated prefix if it is regional. If absent, the sub prefix is global. + + This field is a member of `oneof`_ ``_region``. status (google.cloud.compute_v1.types.PublicDelegatedPrefixPublicDelegatedSubPrefix.Status): [Output Only] The status of the sub public delegated prefix. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -35828,6 +40862,7 @@ class Status(proto.Enum): class PublicDelegatedPrefixesScopedList(proto.Message): r""" + Attributes: public_delegated_prefixes (Sequence[google.cloud.compute_v1.types.PublicDelegatedPrefix]): [Output Only] A list of PublicDelegatedPrefixes contained in @@ -35835,6 +40870,8 @@ class PublicDelegatedPrefixesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of public delegated prefixes when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
""" public_delegated_prefixes = proto.RepeatedField( @@ -35847,16 +40884,25 @@ class PublicDelegatedPrefixesScopedList(proto.Message): class Quota(proto.Message): r"""A quotas entry. + Attributes: limit (float): [Output Only] Quota limit for this metric. + + This field is a member of `oneof`_ ``_limit``. metric (google.cloud.compute_v1.types.Quota.Metric): [Output Only] Name of the quota metric. + + This field is a member of `oneof`_ ``_metric``. owner (str): [Output Only] Owning resource. This is the resource on which this quota is applied. + + This field is a member of `oneof`_ ``_owner``. usage (float): [Output Only] Current usage of this metric. + + This field is a member of `oneof`_ ``_usage``. """ class Metric(proto.Enum): @@ -35994,6 +41040,7 @@ class Metric(proto.Enum): class RawDisk(proto.Message): r"""The parameters of the raw disk image. + Attributes: container_type (google.cloud.compute_v1.types.RawDisk.ContainerType): The format used to encode and transmit the @@ -36001,10 +41048,14 @@ class RawDisk(proto.Message): a container and transmission format and not a runtime format. Provided by the client when the disk image is created. + + This field is a member of `oneof`_ ``_container_type``. sha1_checksum (str): [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before unpackaging provided by the client when the disk image is created. + + This field is a member of `oneof`_ ``_sha1_checksum``. source (str): The full Google Cloud Storage URL where the raw disk image archive is stored. The following are valid formats for the @@ -36015,6 +41066,8 @@ class RawDisk(proto.Message): provide the full or partial URL of one of the following: - The rawDisk.source URL - The sourceDisk URL - The sourceImage URL - The sourceSnapshot URL + + This field is a member of `oneof`_ ``_source``. """ class ContainerType(proto.Enum): @@ -36061,6 +41114,8 @@ class RecreateInstancesInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. @@ -36107,6 +41162,8 @@ class RecreateInstancesRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -36122,19 +41179,28 @@ class RecreateInstancesRegionInstanceGroupManagerRequest(proto.Message): class Reference(proto.Message): r"""Represents a reference to a resource. + Attributes: kind (str): [Output Only] Type of the resource. Always compute#reference for references. + + This field is a member of `oneof`_ ``_kind``. reference_type (str): A description of the reference type with no implied semantics. Possible values include: 1. MEMBER_OF + + This field is a member of `oneof`_ ``_reference_type``. referrer (str): URL of the resource which refers to the target. + + This field is a member of `oneof`_ ``_referrer``. target (str): URL of the resource to which this reference points. + + This field is a member of `oneof`_ ``_target``. """ kind = proto.Field(proto.STRING, number=3292052, optional=True,) @@ -36151,27 +41217,45 @@ class Region(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. 
+ + This field is a member of `oneof`_ ``_creation_timestamp``. deprecated (google.cloud.compute_v1.types.DeprecationStatus): [Output Only] The deprecation status associated with this region. + + This field is a member of `oneof`_ ``_deprecated``. description (str): [Output Only] Textual description of the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#region for regions. + + This field is a member of `oneof`_ ``_kind``. name (str): [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. quotas (Sequence[google.cloud.compute_v1.types.Quota]): [Output Only] Quotas assigned to this region. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. status (google.cloud.compute_v1.types.Region.Status): [Output Only] Status of the region, either UP or DOWN. + + This field is a member of `oneof`_ ``_status``. supports_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_supports_pzs``. zones (Sequence[str]): [Output Only] A list of zones available in this region, in the form of resource URLs. @@ -36200,14 +41284,19 @@ class Status(proto.Enum): class RegionAutoscalerList(proto.Message): r"""Contains a list of autoscalers. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Autoscaler]): A list of Autoscaler resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -36215,10 +41304,16 @@ class RegionAutoscalerList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -36237,15 +41332,20 @@ def raw_page(self): class RegionDiskTypeList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.DiskType]): A list of DiskType resources. kind (str): [Output Only] Type of resource. Always compute#regionDiskTypeList for region disk types. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -36253,10 +41353,16 @@ class RegionDiskTypeList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
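Region and RegionList above are what the RegionsClient list call returns; each region reports a status of UP or DOWN. A minimal sketch with a placeholder project:

    from google.cloud import compute_v1

    for region in compute_v1.RegionsClient().list(project="my-project"):
        print(region.name, region.status)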
warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -36275,6 +41381,7 @@ def raw_page(self): class RegionDisksAddResourcePoliciesRequest(proto.Message): r""" + Attributes: resource_policies (Sequence[str]): Resource policies to be added to this disk. @@ -36285,6 +41392,7 @@ class RegionDisksAddResourcePoliciesRequest(proto.Message): class RegionDisksRemoveResourcePoliciesRequest(proto.Message): r""" + Attributes: resource_policies (Sequence[str]): Resource policies to be removed from this @@ -36296,10 +41404,13 @@ class RegionDisksRemoveResourcePoliciesRequest(proto.Message): class RegionDisksResizeRequest(proto.Message): r""" + Attributes: size_gb (int): The new size of the regional persistent disk, which is specified in GB. + + This field is a member of `oneof`_ ``_size_gb``. """ size_gb = proto.Field(proto.INT64, number=494929369, optional=True,) @@ -36307,14 +41418,19 @@ class RegionDisksResizeRequest(proto.Message): class RegionInstanceGroupList(proto.Message): r"""Contains a list of InstanceGroup resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceGroup]): A list of InstanceGroup resources. kind (str): The resource type. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -36322,10 +41438,16 @@ class RegionInstanceGroupList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -36346,6 +41468,7 @@ def raw_page(self): class RegionInstanceGroupManagerDeleteInstanceConfigReq(proto.Message): r"""RegionInstanceGroupManagers.deletePerInstanceConfigs + Attributes: names (Sequence[str]): The list of instance names for which we want @@ -36358,16 +41481,21 @@ class RegionInstanceGroupManagerDeleteInstanceConfigReq(proto.Message): class RegionInstanceGroupManagerList(proto.Message): r"""Contains a list of managed instance groups. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceGroupManager]): A list of InstanceGroupManager resources. kind (str): [Output Only] The resource type, which is always compute#instanceGroupManagerList for a list of managed instance groups that exist in th regional scope. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -36375,10 +41503,16 @@ class RegionInstanceGroupManagerList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -36399,6 +41533,7 @@ def raw_page(self): class RegionInstanceGroupManagerPatchInstanceConfigReq(proto.Message): r"""RegionInstanceGroupManagers.patchPerInstanceConfigs + Attributes: per_instance_configs (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): The list of per-instance configs to insert or @@ -36412,6 +41547,7 @@ class RegionInstanceGroupManagerPatchInstanceConfigReq(proto.Message): class RegionInstanceGroupManagerUpdateInstanceConfigReq(proto.Message): r"""RegionInstanceGroupManagers.updatePerInstanceConfigs + Attributes: per_instance_configs (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): The list of per-instance configs to insert or @@ -36425,6 +41561,7 @@ class RegionInstanceGroupManagerUpdateInstanceConfigReq(proto.Message): class RegionInstanceGroupManagersAbandonInstancesRequest(proto.Message): r""" + Attributes: instances (Sequence[str]): The URLs of one or more instances to abandon. This can be a @@ -36437,12 +41574,15 @@ class RegionInstanceGroupManagersAbandonInstancesRequest(proto.Message): class RegionInstanceGroupManagersApplyUpdatesRequest(proto.Message): r"""RegionInstanceGroupManagers.applyUpdatesToInstances + Attributes: all_instances (bool): Flag to update all instances instead of specified list of ���instances���. If the flag is set to true then the instances may not be specified in the request. + + This field is a member of `oneof`_ ``_all_instances``. instances (Sequence[str]): The list of URLs of one or more instances for which you want to apply updates. Each URL can be a full URL or a partial @@ -36458,6 +41598,8 @@ class RegionInstanceGroupManagersApplyUpdatesRequest(proto.Message): update requires a more disruptive action than you set with this flag, the necessary action is performed to execute the update. + + This field is a member of `oneof`_ ``_minimal_action``. most_disruptive_allowed_action (str): The most disruptive action that you want to perform on each instance during the update: - @@ -36469,6 +41611,8 @@ class RegionInstanceGroupManagersApplyUpdatesRequest(proto.Message): REPLACE. If your update requires a more disruptive action than you set with this flag, the update request will fail. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. """ all_instances = proto.Field(proto.BOOL, number=403676512, optional=True,) @@ -36481,6 +41625,7 @@ class RegionInstanceGroupManagersApplyUpdatesRequest(proto.Message): class RegionInstanceGroupManagersCreateInstancesRequest(proto.Message): r"""RegionInstanceGroupManagers.createInstances + Attributes: instances (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): [Required] List of specifications of per-instance configs. @@ -36493,6 +41638,7 @@ class RegionInstanceGroupManagersCreateInstancesRequest(proto.Message): class RegionInstanceGroupManagersDeleteInstancesRequest(proto.Message): r""" + Attributes: instances (Sequence[str]): The URLs of one or more instances to delete. This can be a @@ -36508,6 +41654,8 @@ class RegionInstanceGroupManagersDeleteInstancesRequest(proto.Message): malformed instance URL or a reference to an instance that exists in a zone or region other than the group's zone or region. 
+ + This field is a member of `oneof`_ ``_skip_instances_on_validation_error``. """ instances = proto.RepeatedField(proto.STRING, number=29097598,) @@ -36518,6 +41666,7 @@ class RegionInstanceGroupManagersDeleteInstancesRequest(proto.Message): class RegionInstanceGroupManagersListErrorsResponse(proto.Message): r""" + Attributes: items (Sequence[google.cloud.compute_v1.types.InstanceManagedByIgmError]): [Output Only] The list of errors of the managed instance @@ -36529,6 +41678,8 @@ class RegionInstanceGroupManagersListErrorsResponse(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. """ @property @@ -36543,6 +41694,7 @@ def raw_page(self): class RegionInstanceGroupManagersListInstanceConfigsResp(proto.Message): r""" + Attributes: items (Sequence[google.cloud.compute_v1.types.PerInstanceConfig]): [Output Only] The list of PerInstanceConfig. @@ -36553,8 +41705,12 @@ class RegionInstanceGroupManagersListInstanceConfigsResp(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -36572,6 +41728,7 @@ def raw_page(self): class RegionInstanceGroupManagersListInstancesResponse(proto.Message): r""" + Attributes: managed_instances (Sequence[google.cloud.compute_v1.types.ManagedInstance]): A list of managed instances. @@ -36582,6 +41739,8 @@ class RegionInstanceGroupManagersListInstancesResponse(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. """ @property @@ -36596,6 +41755,7 @@ def raw_page(self): class RegionInstanceGroupManagersRecreateRequest(proto.Message): r""" + Attributes: instances (Sequence[str]): The URLs of one or more instances to recreate. This can be a @@ -36608,12 +41768,15 @@ class RegionInstanceGroupManagersRecreateRequest(proto.Message): class RegionInstanceGroupManagersSetTargetPoolsRequest(proto.Message): r""" + Attributes: fingerprint (str): Fingerprint of the target pools information, which is a hash of the contents. This field is used for optimistic locking when you update the target pool entries. This field is optional. + + This field is a member of `oneof`_ ``_fingerprint``. target_pools (Sequence[str]): The URL of all TargetPool resources to which instances in the instanceGroup field are added. @@ -36627,10 +41790,13 @@ class RegionInstanceGroupManagersSetTargetPoolsRequest(proto.Message): class RegionInstanceGroupManagersSetTemplateRequest(proto.Message): r""" + Attributes: instance_template (str): URL of the InstanceTemplate resource from which all new instances will be created. + + This field is a member of `oneof`_ ``_instance_template``. """ instance_template = proto.Field(proto.STRING, number=309248228, optional=True,) @@ -36638,14 +41804,19 @@ class RegionInstanceGroupManagersSetTemplateRequest(proto.Message): class RegionInstanceGroupsListInstances(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. 
+ + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.InstanceWithNamedPorts]): A list of InstanceWithNamedPorts resources. kind (str): The resource type. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -36653,10 +41824,16 @@ class RegionInstanceGroupsListInstances(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -36677,17 +41854,22 @@ def raw_page(self): class RegionInstanceGroupsListInstancesRequest(proto.Message): r""" + Attributes: instance_state (google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest.InstanceState): Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances. + + This field is a member of `oneof`_ ``_instance_state``. port_name (str): Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances. + + This field is a member of `oneof`_ ``_port_name``. """ class InstanceState(proto.Enum): @@ -36706,6 +41888,7 @@ class InstanceState(proto.Enum): class RegionInstanceGroupsSetNamedPortsRequest(proto.Message): r""" + Attributes: fingerprint (str): The fingerprint of the named ports @@ -36717,6 +41900,8 @@ class RegionInstanceGroupsSetNamedPortsRequest(proto.Message): fingerprint in your request to ensure that you do not overwrite changes that were applied from another concurrent request. + + This field is a member of `oneof`_ ``_fingerprint``. named_ports (Sequence[google.cloud.compute_v1.types.NamedPort]): The list of named ports to set for this instance group. @@ -36730,15 +41915,20 @@ class RegionInstanceGroupsSetNamedPortsRequest(proto.Message): class RegionList(proto.Message): r"""Contains a list of region resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Region]): A list of Region resources. kind (str): [Output Only] Type of resource. Always compute#regionList for lists of regions. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -36746,10 +41936,16 @@ class RegionList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
""" @property @@ -36768,6 +41964,7 @@ def raw_page(self): class RegionSetLabelsRequest(proto.Message): r""" + Attributes: label_fingerprint (str): The fingerprint of the previous set of labels @@ -36778,6 +41975,8 @@ class RegionSetLabelsRequest(proto.Message): to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.RegionSetLabelsRequest.LabelsEntry]): The labels to set for this resource. """ @@ -36788,6 +41987,7 @@ class RegionSetLabelsRequest(proto.Message): class RegionSetPolicyRequest(proto.Message): r""" + Attributes: bindings (Sequence[google.cloud.compute_v1.types.Binding]): Flatten Policy to create a backwacd @@ -36797,12 +41997,16 @@ class RegionSetPolicyRequest(proto.Message): Flatten Policy to create a backward compatible wire-format. Deprecated. Use 'policy' to specify the etag. + + This field is a member of `oneof`_ ``_etag``. policy (google.cloud.compute_v1.types.Policy): REQUIRED: The complete policy to be applied to the 'resource'. The size of the policy is limited to a few 10s of KB. An empty policy is in general a valid policy but certain services (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. """ bindings = proto.RepeatedField(proto.MESSAGE, number=403251854, message="Binding",) @@ -36814,6 +42018,7 @@ class RegionSetPolicyRequest(proto.Message): class RegionTargetHttpsProxiesSetSslCertificatesRequest(proto.Message): r""" + Attributes: ssl_certificates (Sequence[str]): New set of SslCertificate resources to @@ -36827,9 +42032,12 @@ class RegionTargetHttpsProxiesSetSslCertificatesRequest(proto.Message): class RegionUrlMapsValidateRequest(proto.Message): r""" + Attributes: resource (google.cloud.compute_v1.types.UrlMap): Content of the UrlMap to be validated. + + This field is a member of `oneof`_ ``_resource``. """ resource = proto.Field( @@ -36846,6 +42054,8 @@ class RemoveAssociationFirewallPolicyRequest(proto.Message): Name of the firewall policy to update. name (str): Name for the attachment that will be removed. + + This field is a member of `oneof`_ ``_name``. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -36862,6 +42072,8 @@ class RemoveAssociationFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -36894,6 +42106,8 @@ class RemoveHealthCheckTargetPoolRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_pool (str): Name of the target pool to remove health checks from. @@ -36935,6 +42149,8 @@ class RemoveInstanceTargetPoolRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_pool (str): Name of the TargetPool resource to remove instances from. @@ -36979,6 +42195,8 @@ class RemoveInstancesInstanceGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the instance group is located. @@ -37021,6 +42239,8 @@ class RemovePeeringNetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network = proto.Field(proto.STRING, number=232872494,) @@ -37058,6 +42278,8 @@ class RemoveResourcePoliciesDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -37098,6 +42320,8 @@ class RemoveResourcePoliciesInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -37142,6 +42366,8 @@ class RemoveResourcePoliciesRegionDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ disk = proto.Field(proto.STRING, number=3083677,) @@ -37165,6 +42391,8 @@ class RemoveRuleFirewallPolicyRequest(proto.Message): priority (int): The priority of the rule to remove from the firewall policy. + + This field is a member of `oneof`_ ``_priority``. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -37181,6 +42409,8 @@ class RemoveRuleFirewallPolicyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall_policy = proto.Field(proto.STRING, number=498173265,) @@ -37196,6 +42426,8 @@ class RemoveRuleSecurityPolicyRequest(proto.Message): priority (int): The priority of the rule to remove from the security policy. + + This field is a member of `oneof`_ ``_priority``. project (str): Project ID for this request. security_policy (str): @@ -37218,6 +42450,8 @@ class RequestMirrorPolicy(proto.Message): backend_service (str): The full or partial URL to the BackendService resource being mirrored to. + + This field is a member of `oneof`_ ``_backend_service``. """ backend_service = proto.Field(proto.STRING, number=306946058, optional=True,) @@ -37234,18 +42468,28 @@ class Reservation(proto.Message): [Output Only] Full or partial URL to a parent commitment. This field displays for reservations that are tied to a commitment. + + This field is a member of `oneof`_ ``_commitment``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#reservations for reservations. + + This field is a member of `oneof`_ ``_kind``. name (str): The name of the resource, provided by the client when initially creating the resource. 
The resource name must be @@ -37255,26 +42499,40 @@ class Reservation(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. satisfies_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): [Output Only] Server-defined fully-qualified URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. specific_reservation (google.cloud.compute_v1.types.AllocationSpecificSKUReservation): Reservation for instances with specific machine shapes. + + This field is a member of `oneof`_ ``_specific_reservation``. specific_reservation_required (bool): Indicates whether the reservation can be consumed by VMs with affinity for "any" reservation. If the field is set, then only VMs that target the reservation by name can consume from this reservation. + + This field is a member of `oneof`_ ``_specific_reservation_required``. status (google.cloud.compute_v1.types.Reservation.Status): [Output Only] The status of the reservation. + + This field is a member of `oneof`_ ``_status``. zone (str): Zone in which the reservation resides. A zone must be provided if the reservation is created within a commitment. + + This field is a member of `oneof`_ ``_zone``. """ class Status(proto.Enum): @@ -37317,11 +42575,15 @@ class ReservationAffinity(proto.Message): can consume resources: ANY_RESERVATION (default), SPECIFIC_RESERVATION, or NO_RESERVATION. See Consuming reserved instances for examples. + + This field is a member of `oneof`_ ``_consume_reservation_type``. key (str): Corresponds to the label key of a reservation resource. To target a SPECIFIC_RESERVATION by name, specify googleapis.com/reservation-name as the key and specify the name of your reservation as its value. + + This field is a member of `oneof`_ ``_key``. values (Sequence[str]): Corresponds to the label values of a reservation resource. This can be either a name @@ -37351,14 +42613,19 @@ class ConsumeReservationType(proto.Enum): class ReservationAggregatedList(proto.Message): r"""Contains a list of reservations. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ReservationAggregatedList.ItemsEntry]): A list of Allocation resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -37366,12 +42633,18 @@ class ReservationAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -37393,15 +42666,20 @@ def raw_page(self): class ReservationList(proto.Message): r""" + Attributes: id (str): [Output Only] The unique identifier for the resource. 
This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Reservation]): [Output Only] A list of Allocation resources. kind (str): [Output Only] Type of resource.Always compute#reservationsList for listsof reservations + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -37409,10 +42687,16 @@ class ReservationList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -37431,10 +42715,13 @@ def raw_page(self): class ReservationsResizeRequest(proto.Message): r""" + Attributes: specific_sku_count (int): Number of allocated resources can be resized with minimum = 1 and maximum = 1000. + + This field is a member of `oneof`_ ``_specific_sku_count``. """ specific_sku_count = proto.Field(proto.INT64, number=13890720, optional=True,) @@ -37442,6 +42729,7 @@ class ReservationsResizeRequest(proto.Message): class ReservationsScopedList(proto.Message): r""" + Attributes: reservations (Sequence[google.cloud.compute_v1.types.Reservation]): A list of reservations contained in this @@ -37449,6 +42737,8 @@ class ReservationsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of reservations when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ reservations = proto.RepeatedField( @@ -37484,6 +42774,8 @@ class ResetInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -37521,6 +42813,8 @@ class ResizeDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -37559,6 +42853,8 @@ class ResizeInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. size (int): The number of running instances that the managed instance group should maintain at any @@ -37606,6 +42902,8 @@ class ResizeRegionDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ disk = proto.Field(proto.STRING, number=3083677,) @@ -37644,6 +42942,8 @@ class ResizeRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. size (int): Number of instances that should exist in this instance group manager. 
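(Illustrative sketch, not part of the generated diff: the ``This field is a member of `oneof`_`` annotations added throughout these docstrings indicate optional proto3 fields with explicit presence, which proto-plus exposes through the ``in`` operator. Only the ``project``, ``zone``, ``disk``, and ``request_id`` field names appear in the hunks above; ``disks_resize_request_resource`` and ``DisksResizeRequest.size_gb`` are assumed from the library's usual naming, and the project/zone/disk values are hypothetical.)

    from google.cloud import compute_v1

    # Build a resize request; request_id has explicit presence, so it is
    # only serialized once it has been assigned.
    request = compute_v1.ResizeDiskRequest(
        project="my-project",      # hypothetical project ID
        zone="us-central1-a",      # hypothetical zone
        disk="my-disk",            # hypothetical disk name
        disks_resize_request_resource=compute_v1.DisksResizeRequest(size_gb=200),
    )

    # proto-plus reports presence of optional fields via the `in` operator.
    print("request_id" in request)   # False until the field is set
    request.request_id = "00000000-0000-0000-0000-000000000001"  # non-zero UUID
    print("request_id" in request)   # True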
@@ -37679,6 +42979,8 @@ class ResizeReservationRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. reservation (str): Name of the reservation to update. reservations_resize_request_resource (google.cloud.compute_v1.types.ReservationsResizeRequest): @@ -37704,6 +43006,8 @@ class ResourceCommitment(proto.Message): accelerator_type (str): Name of the accelerator type resource. Applicable only when the type is ACCELERATOR. + + This field is a member of `oneof`_ ``_accelerator_type``. amount (int): The amount of the resource purchased (in a type-dependent unit, such as bytes). For vCPUs, @@ -37711,9 +43015,13 @@ class ResourceCommitment(proto.Message): must be provided in MB. Memory must be a multiple of 256 MB, with up to 6.5GB of memory per every vCPU. + + This field is a member of `oneof`_ ``_amount``. type_ (google.cloud.compute_v1.types.ResourceCommitment.Type): Type of resource for which this commitment applies. Possible values are VCPU and MEMORY + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -37734,11 +43042,14 @@ class Type(proto.Enum): class ResourceGroupReference(proto.Message): r""" + Attributes: group (str): A URI referencing one of the instance groups or network endpoint groups listed in the backend service. + + This field is a member of `oneof`_ ``_group``. """ group = proto.Field(proto.STRING, number=98629247, optional=True,) @@ -37746,6 +43057,7 @@ class ResourceGroupReference(proto.Message): class ResourcePoliciesScopedList(proto.Message): r""" + Attributes: resource_policies (Sequence[google.cloud.compute_v1.types.ResourcePolicy]): A list of resourcePolicies contained in this @@ -37753,6 +43065,8 @@ class ResourcePoliciesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of resourcePolicies when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ resource_policies = proto.RepeatedField( @@ -37772,20 +43086,31 @@ class ResourcePolicy(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): + This field is a member of `oneof`_ ``_description``. group_placement_policy (google.cloud.compute_v1.types.ResourcePolicyGroupPlacementPolicy): Resource policy for instances for placement configuration. + + This field is a member of `oneof`_ ``_group_placement_policy``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. instance_schedule_policy (google.cloud.compute_v1.types.ResourcePolicyInstanceSchedulePolicy): Resource policy for scheduling instance operations. + + This field is a member of `oneof`_ ``_instance_schedule_policy``. kind (str): [Output Only] Type of the resource. Always compute#resource_policies for resource policies. + + This field is a member of `oneof`_ ``_kind``. name (str): The name of the resource, provided by the client when initially creating the resource. The resource name must be @@ -37795,18 +43120,29 @@ class ResourcePolicy(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. 
region (str): + This field is a member of `oneof`_ ``_region``. resource_status (google.cloud.compute_v1.types.ResourcePolicyResourceStatus): [Output Only] The system status of the resource policy. + + This field is a member of `oneof`_ ``_resource_status``. self_link (str): [Output Only] Server-defined fully-qualified URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. snapshot_schedule_policy (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicy): Resource policy for persistent disks for creating snapshots. + + This field is a member of `oneof`_ ``_snapshot_schedule_policy``. status (google.cloud.compute_v1.types.ResourcePolicy.Status): [Output Only] The status of resource policy creation. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -37854,16 +43190,22 @@ class Status(proto.Enum): class ResourcePolicyAggregatedList(proto.Message): r"""Contains a list of resourcePolicies. + Attributes: etag (str): + This field is a member of `oneof`_ ``_etag``. id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ResourcePolicyAggregatedList.ItemsEntry]): A list of ResourcePolicy resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -37871,12 +43213,18 @@ class ResourcePolicyAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -37902,20 +43250,27 @@ def raw_page(self): class ResourcePolicyDailyCycle(proto.Message): r"""Time window specified for daily operations. + Attributes: days_in_cycle (int): Defines a schedule with units measured in months. The value determines how many months pass between the start of each cycle. + + This field is a member of `oneof`_ ``_days_in_cycle``. duration (str): [Output only] A predetermined duration for the window, automatically chosen to be the smallest possible in the given scenario. + + This field is a member of `oneof`_ ``_duration``. start_time (str): Start time of the window. This must be in UTC format that resolves to one of 00:00, 04:00, 08:00, 12:00, 16:00, or 20:00. For example, both 13:00-5 and 08:00 are valid. + + This field is a member of `oneof`_ ``_start_time``. """ days_in_cycle = proto.Field(proto.INT32, number=369790004, optional=True,) @@ -37934,10 +43289,16 @@ class ResourcePolicyGroupPlacementPolicy(proto.Message): will be spread across. If two instances are in different availability domain, they will not be put in the same low latency network + + This field is a member of `oneof`_ ``_availability_domain_count``. collocation (google.cloud.compute_v1.types.ResourcePolicyGroupPlacementPolicy.Collocation): Specifies network collocation + + This field is a member of `oneof`_ ``_collocation``. 
vm_count (int): Number of vms in this placement group + + This field is a member of `oneof`_ ``_vm_count``. """ class Collocation(proto.Enum): @@ -37957,17 +43318,24 @@ class Collocation(proto.Enum): class ResourcePolicyHourlyCycle(proto.Message): r"""Time window specified for hourly operations. + Attributes: duration (str): [Output only] Duration of the time window, automatically chosen to be smallest possible in the given scenario. + + This field is a member of `oneof`_ ``_duration``. hours_in_cycle (int): Defines a schedule with units measured in hours. The value determines how many hours pass between the start of each cycle. + + This field is a member of `oneof`_ ``_hours_in_cycle``. start_time (str): Time within the window to start the operations. It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT. + + This field is a member of `oneof`_ ``_start_time``. """ duration = proto.Field(proto.STRING, number=155471252, optional=True,) @@ -37983,20 +43351,30 @@ class ResourcePolicyInstanceSchedulePolicy(proto.Message): expiration_time (str): The expiration time of the schedule. The timestamp is an RFC3339 string. + + This field is a member of `oneof`_ ``_expiration_time``. start_time (str): The start time of the schedule. The timestamp is an RFC3339 string. + + This field is a member of `oneof`_ ``_start_time``. time_zone (str): Specifies the time zone to be used in interpreting Schedule.schedule. The value of this field must be a time zone name from the tz database: http://en.wikipedia.org/wiki/Tz_database. + + This field is a member of `oneof`_ ``_time_zone``. vm_start_schedule (google.cloud.compute_v1.types.ResourcePolicyInstanceSchedulePolicySchedule): Specifies the schedule for starting instances. + + This field is a member of `oneof`_ ``_vm_start_schedule``. vm_stop_schedule (google.cloud.compute_v1.types.ResourcePolicyInstanceSchedulePolicySchedule): Specifies the schedule for stopping instances. + + This field is a member of `oneof`_ ``_vm_stop_schedule``. """ expiration_time = proto.Field(proto.STRING, number=230299229, optional=True,) @@ -38018,10 +43396,13 @@ class ResourcePolicyInstanceSchedulePolicy(proto.Message): class ResourcePolicyInstanceSchedulePolicySchedule(proto.Message): r"""Schedule for an instance operation. + Attributes: schedule (str): Specifies the frequency for the operation, using the unix-cron format. + + This field is a member of `oneof`_ ``_schedule``. """ schedule = proto.Field(proto.STRING, number=375820951, optional=True,) @@ -38029,17 +43410,23 @@ class ResourcePolicyInstanceSchedulePolicySchedule(proto.Message): class ResourcePolicyList(proto.Message): r""" + Attributes: etag (str): + This field is a member of `oneof`_ ``_etag``. id (str): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ResourcePolicy]): [Output Only] A list of ResourcePolicy resources. kind (str): [Output Only] Type of resource.Always compute#resourcePoliciesList for listsof resourcePolicies + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -38047,10 +43434,16 @@ class ResourcePolicyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -38081,6 +43474,8 @@ class ResourcePolicyResourceStatus(proto.Message): [Output Only] Specifies a set of output values reffering to the instance_schedule_policy system status. This field should have the same name as corresponding policy field. + + This field is a member of `oneof`_ ``_instance_schedule_policy``. """ instance_schedule_policy = proto.Field( @@ -38093,14 +43488,19 @@ class ResourcePolicyResourceStatus(proto.Message): class ResourcePolicyResourceStatusInstanceSchedulePolicyStatus(proto.Message): r""" + Attributes: last_run_start_time (str): [Output Only] The last time the schedule successfully ran. The timestamp is an RFC3339 string. + + This field is a member of `oneof`_ ``_last_run_start_time``. next_run_start_time (str): [Output Only] The next time the schedule is planned to run. The actual time might be slightly different. The timestamp is an RFC3339 string. + + This field is a member of `oneof`_ ``_next_run_start_time``. """ last_run_start_time = proto.Field(proto.STRING, number=303069063, optional=True,) @@ -38117,14 +43517,20 @@ class ResourcePolicySnapshotSchedulePolicy(proto.Message): retention_policy (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicyRetentionPolicy): Retention policy applied to snapshots created by this resource policy. + + This field is a member of `oneof`_ ``_retention_policy``. schedule (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicySchedule): A Vm Maintenance Policy specifies what kind of infrastructure maintenance we are allowed to perform on this VM and when. Schedule that is applied to disks covered by this policy. + + This field is a member of `oneof`_ ``_schedule``. snapshot_properties (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicySnapshotProperties): Properties with which snapshots are created such as labels, encryption keys. + + This field is a member of `oneof`_ ``_snapshot_properties``. """ retention_policy = proto.Field( @@ -38149,13 +43555,18 @@ class ResourcePolicySnapshotSchedulePolicy(proto.Message): class ResourcePolicySnapshotSchedulePolicyRetentionPolicy(proto.Message): r"""Policy for retention of scheduled snapshots. + Attributes: max_retention_days (int): Maximum age of the snapshot that is allowed to be kept. + + This field is a member of `oneof`_ ``_max_retention_days``. on_source_disk_delete (google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicyRetentionPolicy.OnSourceDiskDelete): Specifies the behavior to apply to scheduled snapshots when the source disk is deleted. + + This field is a member of `oneof`_ ``_on_source_disk_delete``. """ class OnSourceDiskDelete(proto.Enum): @@ -38180,10 +43591,13 @@ class ResourcePolicySnapshotSchedulePolicySchedule(proto.Message): Attributes: daily_schedule (google.cloud.compute_v1.types.ResourcePolicyDailyCycle): + This field is a member of `oneof`_ ``_daily_schedule``. hourly_schedule (google.cloud.compute_v1.types.ResourcePolicyHourlyCycle): + This field is a member of `oneof`_ ``_hourly_schedule``. weekly_schedule (google.cloud.compute_v1.types.ResourcePolicyWeeklyCycle): + This field is a member of `oneof`_ ``_weekly_schedule``. 
""" daily_schedule = proto.Field( @@ -38213,9 +43627,13 @@ class ResourcePolicySnapshotSchedulePolicySnapshotProperties(proto.Message): Attributes: chain_name (str): Chain name that the snapshot is created in. + + This field is a member of `oneof`_ ``_chain_name``. guest_flush (bool): Indication to perform a 'guest aware' snapshot. + + This field is a member of `oneof`_ ``_guest_flush``. labels (Sequence[google.cloud.compute_v1.types.ResourcePolicySnapshotSchedulePolicySnapshotProperties.LabelsEntry]): Labels to apply to scheduled snapshots. These can be later modified by the setLabels method. @@ -38233,6 +43651,7 @@ class ResourcePolicySnapshotSchedulePolicySnapshotProperties(proto.Message): class ResourcePolicyWeeklyCycle(proto.Message): r"""Time window specified for weekly operations. + Attributes: day_of_weeks (Sequence[google.cloud.compute_v1.types.ResourcePolicyWeeklyCycleDayOfWeek]): Up to 7 intervals/windows, one for each day @@ -38246,6 +43665,7 @@ class ResourcePolicyWeeklyCycle(proto.Message): class ResourcePolicyWeeklyCycleDayOfWeek(proto.Message): r""" + Attributes: day (google.cloud.compute_v1.types.ResourcePolicyWeeklyCycleDayOfWeek.Day): Defines a schedule that runs on specific days @@ -38253,12 +43673,18 @@ class ResourcePolicyWeeklyCycleDayOfWeek(proto.Message): following options are available: MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY. + + This field is a member of `oneof`_ ``_day``. duration (str): [Output only] Duration of the time window, automatically chosen to be smallest possible in the given scenario. + + This field is a member of `oneof`_ ``_duration``. start_time (str): Time within the window to start the operations. It must be in format "HH:MM", where HH : [00-23] and MM : [00-00] GMT. + + This field is a member of `oneof`_ ``_start_time``. """ class Day(proto.Enum): @@ -38292,19 +43718,29 @@ class Route(proto.Message): [Output Only] AS path. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this field when you create the resource. + + This field is a member of `oneof`_ ``_description``. dest_range (str): The destination range of outgoing packets that this route applies to. Both IPv4 and IPv6 are supported. + + This field is a member of `oneof`_ ``_dest_range``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of this resource. Always compute#routes for Route resources. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -38315,15 +43751,21 @@ class Route(proto.Message): the last character) must be a dash, lowercase letter, or digit. The last character must be a lowercase letter or digit. + + This field is a member of `oneof`_ ``_name``. network (str): Fully-qualified URL of the network that this route applies to. + + This field is a member of `oneof`_ ``_network``. next_hop_gateway (str): The URL to a gateway that should handle matching packets. You can only specify the internet gateway using a full or partial valid URL: projects/ project/global/gateways/default- internet-gateway + + This field is a member of `oneof`_ ``_next_hop_gateway``. 
next_hop_ilb (str): The URL to a forwarding rule of type loadBalancingScheme=INTERNAL that should handle @@ -38333,24 +43775,36 @@ class Route(proto.Message): https://www.googleapis.com/compute/v1/projects/project/regions/region /forwardingRules/forwardingRule - regions/region/forwardingRules/forwardingRule + + This field is a member of `oneof`_ ``_next_hop_ilb``. next_hop_instance (str): The URL to an instance that should handle matching packets. You can specify this as a full or partial URL. For example: https://www.googleapis.com/compute/v1/projects/project/zones/zone/instances/ + + This field is a member of `oneof`_ ``_next_hop_instance``. next_hop_ip (str): The network IP address of an instance that should handle matching packets. Only IPv4 is supported. + + This field is a member of `oneof`_ ``_next_hop_ip``. next_hop_network (str): The URL of the local network if it should handle matching packets. + + This field is a member of `oneof`_ ``_next_hop_network``. next_hop_peering (str): [Output Only] The network peering name that should handle matching packets, which should conform to RFC1035. + + This field is a member of `oneof`_ ``_next_hop_peering``. next_hop_vpn_tunnel (str): The URL to a VpnTunnel that should handle matching packets. + + This field is a member of `oneof`_ ``_next_hop_vpn_tunnel``. priority (int): The priority of this route. Priority is used to break ties in cases where there is more than one matching route of @@ -38358,6 +43812,8 @@ class Route(proto.Message): equal prefix length, the one with the lowest-numbered priority value wins. The default value is ``1000``. The priority value must be from ``0`` to ``65535``, inclusive. + + This field is a member of `oneof`_ ``_priority``. route_type (google.cloud.compute_v1.types.Route.RouteType): [Output Only] The type of this route, which can be one of the following values: - 'TRANSIT' for a transit route that @@ -38365,9 +43821,13 @@ class Route(proto.Message): readvertise to one of its BGP peers - 'SUBNET' for a route from a subnet of the VPC - 'BGP' for a route learned from a BGP peer of this router - 'STATIC' for a static route + + This field is a member of `oneof`_ ``_route_type``. self_link (str): [Output Only] Server-defined fully-qualified URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. tags (Sequence[str]): A list of instance tags to which this route applies. @@ -38419,6 +43879,7 @@ class RouteType(proto.Enum): class RouteAsPath(proto.Message): r""" + Attributes: as_lists (Sequence[int]): [Output Only] The AS numbers of the AS Path. @@ -38432,6 +43893,8 @@ class RouteAsPath(proto.Message): the route has traversed - 'AS_CONFED_SET': unordered set of Member Autonomous Systems in the local confederation that the route has traversed + + This field is a member of `oneof`_ ``_path_segment_type``. """ class PathSegmentType(proto.Enum): @@ -38458,14 +43921,19 @@ class PathSegmentType(proto.Enum): class RouteList(proto.Message): r"""Contains a list of Route resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Route]): A list of Route resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. 
If the number of results is @@ -38473,10 +43941,16 @@ class RouteList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -38500,6 +43974,8 @@ class Router(proto.Message): Attributes: bgp (google.cloud.compute_v1.types.RouterBgp): BGP information specific to this router. + + This field is a member of `oneof`_ ``_bgp``. bgp_peers (Sequence[google.cloud.compute_v1.types.RouterBgpPeer]): BGP information that must be configured into the routing stack to establish BGP peering. This @@ -38508,18 +43984,26 @@ class Router(proto.Message): address. Please refer to RFC4273. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. encrypted_interconnect_router (bool): Indicates if a router is dedicated for use with encrypted VLAN attachments (interconnectAttachments). Not currently available publicly. + + This field is a member of `oneof`_ ``_encrypted_interconnect_router``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. interfaces (Sequence[google.cloud.compute_v1.types.RouterInterface]): Router interfaces. Each interface requires either one linked resource, (for example, @@ -38528,6 +44012,8 @@ class Router(proto.Message): kind (str): [Output Only] Type of resource. Always compute#router for routers. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -38537,18 +44023,26 @@ class Router(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. nats (Sequence[google.cloud.compute_v1.types.RouterNat]): A list of NAT services created in this router. network (str): URI of the network to which this router belongs. + + This field is a member of `oneof`_ ``_network``. region (str): [Output Only] URI of the region where the router resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. """ bgp = proto.Field(proto.MESSAGE, number=97483, optional=True, message="RouterBgp",) @@ -38574,12 +44068,17 @@ class Router(proto.Message): class RouterAdvertisedIpRange(proto.Message): r"""Description-tagged IP ranges for the router to advertise. + Attributes: description (str): User-specified description for the IP range. + + This field is a member of `oneof`_ ``_description``. range_ (str): The IP range to advertise. The value must be a CIDR-formatted string. 
+ + This field is a member of `oneof`_ ``_range``. """ description = proto.Field(proto.STRING, number=422937596, optional=True,) @@ -38588,14 +44087,19 @@ class RouterAdvertisedIpRange(proto.Message): class RouterAggregatedList(proto.Message): r"""Contains a list of routers. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.RouterAggregatedList.ItemsEntry]): A list of Router resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -38603,12 +44107,18 @@ class RouterAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -38630,11 +44140,14 @@ def raw_page(self): class RouterBgp(proto.Message): r""" + Attributes: advertise_mode (google.cloud.compute_v1.types.RouterBgp.AdvertiseMode): User-specified flag to indicate which mode to use for advertisement. The options are DEFAULT or CUSTOM. + + This field is a member of `oneof`_ ``_advertise_mode``. advertised_groups (Sequence[google.cloud.compute_v1.types.RouterBgp.AdvertisedGroups]): User-specified list of prefix groups to advertise in custom mode. This field can only be populated if advertise_mode is @@ -38655,6 +44168,8 @@ class RouterBgp(proto.Message): 32-bit. The value will be fixed for this router resource. All VPN tunnels that link to this router will have the same local ASN. + + This field is a member of `oneof`_ ``_asn``. keepalive_interval (int): The interval in seconds between BGP keepalive messages that are sent to the peer. Hold time is @@ -38668,6 +44183,8 @@ class RouterBgp(proto.Message): BGP connection between the two peers. If set, this value must be between 20 and 60. The default is 20. + + This field is a member of `oneof`_ ``_keepalive_interval``. """ class AdvertiseMode(proto.Enum): @@ -38698,10 +44215,13 @@ class AdvertisedGroups(proto.Enum): class RouterBgpPeer(proto.Message): r""" + Attributes: advertise_mode (google.cloud.compute_v1.types.RouterBgpPeer.AdvertiseMode): User-specified flag to indicate which mode to use for advertisement. + + This field is a member of `oneof`_ ``_advertise_mode``. advertised_groups (Sequence[google.cloud.compute_v1.types.RouterBgpPeer.AdvertisedGroups]): User-specified list of prefix groups to advertise in custom mode, which can take one of the following options: - @@ -38724,8 +44244,12 @@ class RouterBgpPeer(proto.Message): peer. Where there is more than one matching route of maximum length, the routes with the lowest priority value win. + + This field is a member of `oneof`_ ``_advertised_route_priority``. bfd (google.cloud.compute_v1.types.RouterBgpPeerBfd): BFD configuration for the BGP peering. + + This field is a member of `oneof`_ ``_bfd``. 
enable (google.cloud.compute_v1.types.RouterBgpPeer.Enable): The status of the BGP peer connection. If set to FALSE, any active session with the peer is @@ -38733,12 +44257,18 @@ class RouterBgpPeer(proto.Message): information is removed. If set to TRUE, the peer connection can be established with routing information. The default is TRUE. + + This field is a member of `oneof`_ ``_enable``. interface_name (str): Name of the interface the BGP peer is associated with. + + This field is a member of `oneof`_ ``_interface_name``. ip_address (str): IP address of the interface inside Google Cloud Platform. Only IPv4 is supported. + + This field is a member of `oneof`_ ``_ip_address``. management_type (google.cloud.compute_v1.types.RouterBgpPeer.ManagementType): [Output Only] The resource that configures and manages this BGP peer. - MANAGED_BY_USER is the default value and can be @@ -38748,6 +44278,8 @@ class RouterBgpPeer(proto.Message): type PARTNER. Google automatically creates, updates, and deletes this type of BGP peer when the PARTNER InterconnectAttachment is created, updated, or deleted. + + This field is a member of `oneof`_ ``_management_type``. name (str): Name of this BGP peer. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must @@ -38756,12 +44288,18 @@ class RouterBgpPeer(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. peer_asn (int): Peer BGP Autonomous System Number (ASN). Each BGP interface may use a different value. + + This field is a member of `oneof`_ ``_peer_asn``. peer_ip_address (str): IP address of the BGP interface outside Google Cloud Platform. Only IPv4 is supported. + + This field is a member of `oneof`_ ``_peer_ip_address``. router_appliance_instance (str): URI of the VM instance that is used as third- arty router appliances such as Next Gen @@ -38770,6 +44308,8 @@ class RouterBgpPeer(proto.Message): zones contained in the same region as this Cloud Router. The VM instance is the peer side of the BGP session. + + This field is a member of `oneof`_ ``_router_appliance_instance``. """ class AdvertiseMode(proto.Enum): @@ -38840,6 +44380,7 @@ class ManagementType(proto.Enum): class RouterBgpPeerBfd(proto.Message): r""" + Attributes: min_receive_interval (int): The minimum interval, in milliseconds, @@ -38849,6 +44390,8 @@ class RouterBgpPeerBfd(proto.Message): greater of this value and the transmit interval of the other router. If set, this value must be between 1000 and 30000. The default is 1000. + + This field is a member of `oneof`_ ``_min_receive_interval``. min_transmit_interval (int): The minimum interval, in milliseconds, between BFD control packets transmitted to the @@ -38858,11 +44401,15 @@ class RouterBgpPeerBfd(proto.Message): receive interval of the other router. If set, this value must be between 1000 and 30000. The default is 1000. + + This field is a member of `oneof`_ ``_min_transmit_interval``. multiplier (int): The number of consecutive BFD packets that must be missed before BFD declares that a peer is unavailable. If set, the value must be a value between 5 and 16. The default is 5. + + This field is a member of `oneof`_ ``_multiplier``. session_initialization_mode (google.cloud.compute_v1.types.RouterBgpPeerBfd.SessionInitializationMode): The BFD session initialization mode for this BGP peer. 
If set to ACTIVE, the Cloud Router @@ -38872,6 +44419,8 @@ class RouterBgpPeerBfd(proto.Message): for this BGP peer. If set to DISABLED, BFD is disabled for this BGP peer. The default is PASSIVE. + + This field is a member of `oneof`_ ``_session_initialization_mode``. """ class SessionInitializationMode(proto.Enum): @@ -38897,6 +44446,7 @@ class SessionInitializationMode(proto.Enum): class RouterInterface(proto.Message): r""" + Attributes: ip_range (str): IP address and range of the interface. The IP @@ -38905,18 +44455,24 @@ class RouterInterface(proto.Message): formatted string, for example: 169.254.0.1/30. NOTE: Do not truncate the address as it represents the IP address of the interface. + + This field is a member of `oneof`_ ``_ip_range``. linked_interconnect_attachment (str): URI of the linked Interconnect attachment. It must be in the same region as the router. Each interface can have one linked resource, which can be a VPN tunnel, an Interconnect attachment, or a virtual machine instance. + + This field is a member of `oneof`_ ``_linked_interconnect_attachment``. linked_vpn_tunnel (str): URI of the linked VPN tunnel, which must be in the same region as the router. Each interface can have one linked resource, which can be a VPN tunnel, an Interconnect attachment, or a virtual machine instance. + + This field is a member of `oneof`_ ``_linked_vpn_tunnel``. management_type (google.cloud.compute_v1.types.RouterInterface.ManagementType): [Output Only] The resource that configures and manages this interface. - MANAGED_BY_USER is the default value and can be @@ -38926,6 +44482,8 @@ class RouterInterface(proto.Message): type PARTNER. Google automatically creates, updates, and deletes this type of interface when the PARTNER InterconnectAttachment is created, updated, or deleted. + + This field is a member of `oneof`_ ``_management_type``. name (str): Name of this interface entry. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the @@ -38934,12 +44492,16 @@ class RouterInterface(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. private_ip_address (str): The regional private internal IP address that is used to establish BGP sessions to a VM instance acting as a third-party Router Appliance, such as a Next Gen Firewall, a Virtual Router, or an SD-WAN VM. + + This field is a member of `oneof`_ ``_private_ip_address``. redundant_interface (str): Name of the interface that will be redundant with the current interface you are creating. The redundantInterface @@ -38954,6 +44516,8 @@ class RouterInterface(proto.Message): means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_redundant_interface``. subnetwork (str): The URI of the subnetwork resource that this interface belongs to, which must be in the same @@ -38962,6 +44526,8 @@ class RouterInterface(proto.Message): interface, the VM instance must belong to the same subnetwork as the subnetwork specified here. + + This field is a member of `oneof`_ ``_subnetwork``. """ class ManagementType(proto.Enum): @@ -38993,15 +44559,20 @@ class ManagementType(proto.Enum): class RouterList(proto.Message): r"""Contains a list of Router resources. 
+ Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Router]): A list of Router resources. kind (str): [Output Only] Type of resource. Always compute#router for routers. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -39009,10 +44580,16 @@ class RouterList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -39046,11 +44623,16 @@ class RouterNat(proto.Message): only. enable_endpoint_independent_mapping (bool): + This field is a member of `oneof`_ ``_enable_endpoint_independent_mapping``. icmp_idle_timeout_sec (int): Timeout (in seconds) for ICMP connections. Defaults to 30s if not set. + + This field is a member of `oneof`_ ``_icmp_idle_timeout_sec``. log_config (google.cloud.compute_v1.types.RouterNatLogConfig): Configure logging on this NAT. + + This field is a member of `oneof`_ ``_log_config``. min_ports_per_vm (int): Minimum number of ports allocated to a VM from this NAT config. If not set, a default @@ -39058,10 +44640,14 @@ class RouterNat(proto.Message): rounded up to the nearest power of 2. For example, if the value of this field is 50, at least 64 ports are allocated to a VM. + + This field is a member of `oneof`_ ``_min_ports_per_vm``. name (str): Unique name of this Nat service. The name must be 1-63 characters long and comply with RFC1035. + + This field is a member of `oneof`_ ``_name``. nat_ip_allocate_option (google.cloud.compute_v1.types.RouterNat.NatIpAllocateOption): Specify the NatIpAllocateOption, which can take one of the following values: - MANUAL_ONLY: Uses only Nat IP addresses @@ -39070,6 +44656,8 @@ class RouterNat(proto.Message): IPs are allocated by Google Cloud Platform; customers can't specify any Nat IPs. When choosing AUTO_ONLY, then nat_ip should be empty. + + This field is a member of `oneof`_ ``_nat_ip_allocate_option``. nat_ips (Sequence[str]): A list of URLs of the IP resources used for this Nat service. These IP addresses must be @@ -39090,6 +44678,8 @@ class RouterNat(proto.Message): ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES, then there should not be any other Router.Nat section in any Router for this network in this region. + + This field is a member of `oneof`_ ``_source_subnetwork_ip_ranges_to_nat``. subnetworks (Sequence[google.cloud.compute_v1.types.RouterNatSubnetworkToNat]): A list of Subnetwork resources whose traffic should be translated by NAT Gateway. It is used only when @@ -39098,15 +44688,23 @@ class RouterNat(proto.Message): tcp_established_idle_timeout_sec (int): Timeout (in seconds) for TCP established connections. Defaults to 1200s if not set. + + This field is a member of `oneof`_ ``_tcp_established_idle_timeout_sec``. tcp_time_wait_timeout_sec (int): Timeout (in seconds) for TCP connections that are in TIME_WAIT state. Defaults to 120s if not set. 
+ + This field is a member of `oneof`_ ``_tcp_time_wait_timeout_sec``. tcp_transitory_idle_timeout_sec (int): Timeout (in seconds) for TCP transitory connections. Defaults to 30s if not set. + + This field is a member of `oneof`_ ``_tcp_transitory_idle_timeout_sec``. udp_idle_timeout_sec (int): Timeout (in seconds) for UDP connections. Defaults to 30s if not set. + + This field is a member of `oneof`_ ``_udp_idle_timeout_sec``. """ class NatIpAllocateOption(proto.Enum): @@ -39177,10 +44775,13 @@ class SourceSubnetworkIpRangesToNat(proto.Enum): class RouterNatLogConfig(proto.Message): r"""Configuration of logging on a NAT. + Attributes: enable (bool): Indicates whether or not to export logs. This is false by default. + + This field is a member of `oneof`_ ``_enable``. filter (google.cloud.compute_v1.types.RouterNatLogConfig.Filter): Specify the desired filtering of logs on this NAT. If unspecified, logs are exported for all connections handled @@ -39189,6 +44790,8 @@ class RouterNatLogConfig(proto.Message): failures. - TRANSLATIONS_ONLY: Export logs only for successful connections. - ALL: Export logs for all connections, successful and unsuccessful. + + This field is a member of `oneof`_ ``_filter``. """ class Filter(proto.Enum): @@ -39210,12 +44813,17 @@ class Filter(proto.Enum): class RouterNatRule(proto.Message): r""" + Attributes: action (google.cloud.compute_v1.types.RouterNatRuleAction): The action to be enforced for traffic that matches this rule. + + This field is a member of `oneof`_ ``_action``. description (str): An optional description of this rule. + + This field is a member of `oneof`_ ``_description``. match (str): CEL expression that specifies the match condition that egress traffic from a VM is evaluated against. If it @@ -39226,11 +44834,15 @@ class RouterNatRule(proto.Message): '1.1.0.1' \|\| destination.ip == '8.8.8.8'" The following example is a valid match expression for private NAT: "nexthop.hub == '/projects/my-project/global/hub/hub-1'". + + This field is a member of `oneof`_ ``_match``. rule_number (int): An integer uniquely identifying a rule in the list. The rule number must be a positive value between 0 and 65000, and must be unique among rules within a NAT. + + This field is a member of `oneof`_ ``_rule_number``. """ action = proto.Field( @@ -39243,6 +44855,7 @@ class RouterNatRule(proto.Message): class RouterNatRuleAction(proto.Message): r""" + Attributes: source_nat_active_ips (Sequence[str]): A list of URLs of the IP resources used for @@ -39263,10 +44876,13 @@ class RouterNatRuleAction(proto.Message): class RouterNatSubnetworkToNat(proto.Message): r"""Defines the IP ranges that want to use NAT for a subnetwork. + Attributes: name (str): URL for the subnetwork resource that will use NAT. + + This field is a member of `oneof`_ ``_name``. secondary_ip_range_names (Sequence[str]): A list of the secondary ranges of the Subnetwork that are allowed to use NAT. This can be populated only if @@ -39296,6 +44912,7 @@ class SourceIpRangesToNat(proto.Enum): class RouterStatus(proto.Message): r""" + Attributes: best_routes (Sequence[google.cloud.compute_v1.types.Route]): Best routes for this router's network. @@ -39308,6 +44925,8 @@ class RouterStatus(proto.Message): network (str): URI of the network to which this router belongs. + + This field is a member of `oneof`_ ``_network``. 
""" best_routes = proto.RepeatedField(proto.MESSAGE, number=395826693, message="Route",) @@ -39325,39 +44944,60 @@ class RouterStatus(proto.Message): class RouterStatusBgpPeerStatus(proto.Message): r""" + Attributes: advertised_routes (Sequence[google.cloud.compute_v1.types.Route]): Routes that were advertised to the remote BGP peer ip_address (str): IP address of the local BGP interface. + + This field is a member of `oneof`_ ``_ip_address``. linked_vpn_tunnel (str): URL of the VPN tunnel that this BGP peer controls. + + This field is a member of `oneof`_ ``_linked_vpn_tunnel``. name (str): Name of this BGP peer. Unique within the Routers resource. + + This field is a member of `oneof`_ ``_name``. num_learned_routes (int): Number of routes learned from the remote BGP Peer. + + This field is a member of `oneof`_ ``_num_learned_routes``. peer_ip_address (str): IP address of the remote BGP interface. + + This field is a member of `oneof`_ ``_peer_ip_address``. router_appliance_instance (str): [Output only] URI of the VM instance that is used as third-party router appliances such as Next Gen Firewalls, Virtual Routers, or Router Appliances. The VM instance is the peer side of the BGP session. + + This field is a member of `oneof`_ ``_router_appliance_instance``. state (str): BGP state as specified in RFC1771. + + This field is a member of `oneof`_ ``_state``. status (google.cloud.compute_v1.types.RouterStatusBgpPeerStatus.Status): Status of the BGP peer: {UP, DOWN} + + This field is a member of `oneof`_ ``_status``. uptime (str): Time this session has been up. Format: 14 years, 51 weeks, 6 days, 23 hours, 59 minutes, 59 seconds + + This field is a member of `oneof`_ ``_uptime``. uptime_seconds (str): Time this session has been up, in seconds. Format: 145 + + This field is a member of `oneof`_ ``_uptime_seconds``. """ class Status(proto.Enum): @@ -39386,6 +45026,7 @@ class Status(proto.Enum): class RouterStatusNatStatus(proto.Message): r"""Status of a NAT contained in this router. + Attributes: auto_allocated_nat_ips (Sequence[str]): A list of IPs auto-allocated for NAT. Example: ["1.1.1.1", @@ -39401,11 +45042,17 @@ class RouterStatusNatStatus(proto.Message): than 0 only if user-specified IPs are NOT enough to allow all configured VMs to use NAT. This value is meaningful only when auto-allocation of NAT IPs is *not* used. + + This field is a member of `oneof`_ ``_min_extra_nat_ips_needed``. name (str): Unique name of this NAT. + + This field is a member of `oneof`_ ``_name``. num_vm_endpoints_with_nat_mappings (int): Number of VM endpoints (i.e., Nics) that can use NAT. + + This field is a member of `oneof`_ ``_num_vm_endpoints_with_nat_mappings``. rule_status (Sequence[google.cloud.compute_v1.types.RouterStatusNatStatusNatRuleStatus]): Status of rules in this NAT. user_allocated_nat_ip_resources (Sequence[str]): @@ -39437,6 +45084,7 @@ class RouterStatusNatStatus(proto.Message): class RouterStatusNatStatusNatRuleStatus(proto.Message): r"""Status of a NAT Rule contained in this NAT. + Attributes: active_nat_ips (Sequence[str]): A list of active IPs for NAT. Example: ["1.1.1.1", @@ -39449,11 +45097,17 @@ class RouterStatusNatStatusNatRuleStatus(proto.Message): will be greater than 0 only if the existing IPs in this NAT Rule are NOT enough to allow all configured VMs to use NAT. + + This field is a member of `oneof`_ ``_min_extra_ips_needed``. num_vm_endpoints_with_nat_mappings (int): Number of VM endpoints (i.e., NICs) that have NAT Mappings from this NAT Rule. 
+ + This field is a member of `oneof`_ ``_num_vm_endpoints_with_nat_mappings``. rule_number (int): Rule number of the rule. + + This field is a member of `oneof`_ ``_rule_number``. """ active_nat_ips = proto.RepeatedField(proto.STRING, number=208517077,) @@ -39467,11 +45121,15 @@ class RouterStatusNatStatusNatRuleStatus(proto.Message): class RouterStatusResponse(proto.Message): r""" + Attributes: kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. result (google.cloud.compute_v1.types.RouterStatus): + This field is a member of `oneof`_ ``_result``. """ kind = proto.Field(proto.STRING, number=3292052, optional=True,) @@ -39482,9 +45140,12 @@ class RouterStatusResponse(proto.Message): class RoutersPreviewResponse(proto.Message): r""" + Attributes: resource (google.cloud.compute_v1.types.Router): Preview of given router. + + This field is a member of `oneof`_ ``_resource``. """ resource = proto.Field( @@ -39494,12 +45155,15 @@ class RoutersPreviewResponse(proto.Message): class RoutersScopedList(proto.Message): r""" + Attributes: routers (Sequence[google.cloud.compute_v1.types.Router]): A list of routers contained in this scope. warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of routers when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ routers = proto.RepeatedField(proto.MESSAGE, number=311906890, message="Router",) @@ -39510,16 +45174,21 @@ class RoutersScopedList(proto.Message): class Rule(proto.Message): r"""This is deprecated and has no effect. Do not use. + Attributes: action (google.cloud.compute_v1.types.Rule.Action): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_action``. conditions (Sequence[google.cloud.compute_v1.types.Condition]): This is deprecated and has no effect. Do not use. description (str): This is deprecated and has no effect. Do not use. + + This field is a member of `oneof`_ ``_description``. ins (Sequence[str]): This is deprecated and has no effect. Do not use. @@ -39559,14 +45228,19 @@ class Action(proto.Enum): class SSLHealthCheck(proto.Message): r""" + Attributes: port (int): The TCP port number for the health check request. The default value is 443. Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. port_name (str): Port name as defined in InstanceGroup#NamedPort#name. If both port and port_name are defined, port takes precedence. + + This field is a member of `oneof`_ ``_port_name``. port_specification (google.cloud.compute_v1.types.SSLHealthCheck.PortSpecification): Specifies how port is selected for health checking, can be one of following values: USE_FIXED_PORT: The port number in @@ -39577,10 +45251,14 @@ class SSLHealthCheck(proto.Message): the port or named port specified in the Backend Service is used for health checking. If not specified, SSL health check follows behavior specified in port and portName fields. + + This field is a member of `oneof`_ ``_port_specification``. proxy_header (google.cloud.compute_v1.types.SSLHealthCheck.ProxyHeader): Specifies the type of proxy header to append before sending data to the backend, either NONE or PROXY_V1. The default is NONE. + + This field is a member of `oneof`_ ``_proxy_header``. request (str): The application data to send once the SSL connection has been established (default value @@ -39588,11 +45266,15 @@ class SSLHealthCheck(proto.Message): empty, the connection establishment alone will indicate health. 
The request data can only be ASCII. + + This field is a member of `oneof`_ ``_request``. response (str): The bytes to match against the beginning of the response data. If left empty (the default value), any response will indicate health. The response data can only be ASCII. + + This field is a member of `oneof`_ ``_response``. """ class PortSpecification(proto.Enum): @@ -39632,19 +45314,26 @@ class ProxyHeader(proto.Enum): class ScalingScheduleStatus(proto.Message): r""" + Attributes: last_start_time (str): [Output Only] The last time the scaling schedule became active. Note: this is a timestamp when a schedule actually became active, not when it was planned to do so. The timestamp is in RFC3339 text format. + + This field is a member of `oneof`_ ``_last_start_time``. next_start_time (str): [Output Only] The next time the scaling schedule is to become active. Note: this is a timestamp when a schedule is planned to run, but the actual time might be slightly different. The timestamp is in RFC3339 text format. + + This field is a member of `oneof`_ ``_next_start_time``. state (google.cloud.compute_v1.types.ScalingScheduleStatus.State): [Output Only] The current state of a scaling schedule. + + This field is a member of `oneof`_ ``_state``. """ class State(proto.Enum): @@ -39662,6 +45351,7 @@ class State(proto.Enum): class Scheduling(proto.Message): r"""Sets the scheduling options for an Instance. NextID: 21 + Attributes: automatic_restart (bool): Specifies whether the instance should be @@ -39672,15 +45362,21 @@ class Scheduling(proto.Message): be automatically restarted. By default, this is set to true so an instance is automatically restarted if it is terminated by Compute Engine. + + This field is a member of `oneof`_ ``_automatic_restart``. location_hint (str): An opaque location hint used to place the instance close to other resources. This field is for use by internal tools that use the public API. + + This field is a member of `oneof`_ ``_location_hint``. min_node_cpus (int): The minimum number of virtual CPUs this instance will consume when running on a sole- tenant node. + + This field is a member of `oneof`_ ``_min_node_cpus``. node_affinities (Sequence[google.cloud.compute_v1.types.SchedulingNodeAffinity]): A set of node affinity and anti-affinity configurations. Refer to Configuring node @@ -39693,12 +45389,16 @@ class Scheduling(proto.Message): the default and only possible behavior is TERMINATE. For more information, see Setting Instance Scheduling Options. + + This field is a member of `oneof`_ ``_on_host_maintenance``. preemptible (bool): Defines whether the instance is preemptible. This can only be set during instance creation or while the instance is stopped and therefore, in a ``TERMINATED`` state. See Instance Life Cycle for more information on the possible instance states. + + This field is a member of `oneof`_ ``_preemptible``. """ class OnHostMaintenance(proto.Enum): @@ -39732,9 +45432,13 @@ class SchedulingNodeAffinity(proto.Message): key (str): Corresponds to the label key of Node resource. + + This field is a member of `oneof`_ ``_key``. operator (google.cloud.compute_v1.types.SchedulingNodeAffinity.Operator): Defines the operation of node selection. Valid operators are IN for affinity and NOT_IN for anti-affinity. + + This field is a member of `oneof`_ ``_operator``. values (Sequence[str]): Corresponds to the label values of Node resource. 
@@ -39756,9 +45460,12 @@ class Operator(proto.Enum): class ScratchDisks(proto.Message): r""" + Attributes: disk_gb (int): Size of the scratch disk, defined in GB. + + This field is a member of `oneof`_ ``_disk_gb``. """ disk_gb = proto.Field(proto.INT32, number=60990141, optional=True,) @@ -39766,12 +45473,17 @@ class ScratchDisks(proto.Message): class Screenshot(proto.Message): r"""An instance's screenshot. + Attributes: contents (str): [Output Only] The Base64-encoded screenshot data. + + This field is a member of `oneof`_ ``_contents``. kind (str): [Output Only] Type of the resource. Always compute#screenshot for the screenshots. + + This field is a member of `oneof`_ ``_kind``. """ contents = proto.Field(proto.STRING, number=506419994, optional=True,) @@ -39780,9 +45492,11 @@ class Screenshot(proto.Message): class SecurityPoliciesListPreconfiguredExpressionSetsResponse(proto.Message): r""" + Attributes: preconfigured_expression_sets (google.cloud.compute_v1.types.SecurityPoliciesWafConfig): + This field is a member of `oneof`_ ``_preconfigured_expression_sets``. """ preconfigured_expression_sets = proto.Field( @@ -39795,9 +45509,11 @@ class SecurityPoliciesListPreconfiguredExpressionSetsResponse(proto.Message): class SecurityPoliciesWafConfig(proto.Message): r""" + Attributes: waf_rules (google.cloud.compute_v1.types.PreconfiguredWafSet): + This field is a member of `oneof`_ ``_waf_rules``. """ waf_rules = proto.Field( @@ -39814,14 +45530,20 @@ class SecurityPolicy(proto.Message): Attributes: adaptive_protection_config (google.cloud.compute_v1.types.SecurityPolicyAdaptiveProtectionConfig): + This field is a member of `oneof`_ ``_adaptive_protection_config``. advanced_options_config (google.cloud.compute_v1.types.SecurityPolicyAdvancedOptionsConfig): + This field is a member of `oneof`_ ``_advanced_options_config``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): Specifies a fingerprint for this resource, which is essentially a hash of the metadata's @@ -39834,12 +45556,18 @@ class SecurityPolicy(proto.Message): fail with error 412 conditionNotMet. To see the latest fingerprint, make get() request to the security policy. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output only] Type of the resource. Always compute#securityPolicyfor security policies + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -39849,6 +45577,8 @@ class SecurityPolicy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. rules (Sequence[google.cloud.compute_v1.types.SecurityPolicyRule]): A list of rules that belong to this policy. There must always be a default rule (rule with priority 2147483647 and @@ -39857,6 +45587,8 @@ class SecurityPolicy(proto.Message): added. self_link (str): [Output Only] Server-defined URL for the resource. 
+ + This field is a member of `oneof`_ ``_self_link``. """ adaptive_protection_config = proto.Field( @@ -39891,6 +45623,8 @@ class SecurityPolicyAdaptiveProtectionConfig(proto.Message): layer7_ddos_defense_config (google.cloud.compute_v1.types.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig): If set to true, enables Cloud Armor Machine Learning. + + This field is a member of `oneof`_ ``_layer7_ddos_defense_config``. """ layer7_ddos_defense_config = proto.Field( @@ -39903,14 +45637,19 @@ class SecurityPolicyAdaptiveProtectionConfig(proto.Message): class SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig(proto.Message): r"""Configuration options for L7 DDoS detection. + Attributes: enable (bool): If set to true, enables CAAP for L7 DDoS detection. + + This field is a member of `oneof`_ ``_enable``. rule_visibility (google.cloud.compute_v1.types.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig.RuleVisibility): Rule visibility can be one of the following: STANDARD - opaque rules. (default) PREMIUM - transparent rules. + + This field is a member of `oneof`_ ``_rule_visibility``. """ class RuleVisibility(proto.Enum): @@ -39929,11 +45668,14 @@ class RuleVisibility(proto.Enum): class SecurityPolicyAdvancedOptionsConfig(proto.Message): r""" + Attributes: json_parsing (google.cloud.compute_v1.types.SecurityPolicyAdvancedOptionsConfig.JsonParsing): + This field is a member of `oneof`_ ``_json_parsing``. log_level (google.cloud.compute_v1.types.SecurityPolicyAdvancedOptionsConfig.LogLevel): + This field is a member of `oneof`_ ``_log_level``. """ class JsonParsing(proto.Enum): @@ -39956,15 +45698,20 @@ class LogLevel(proto.Enum): class SecurityPolicyList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.SecurityPolicy]): A list of SecurityPolicy resources. kind (str): [Output Only] Type of resource. Always compute#securityPolicyList for listsof securityPolicies + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -39972,8 +45719,12 @@ class SecurityPolicyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -39993,9 +45744,11 @@ def raw_page(self): class SecurityPolicyReference(proto.Message): r""" + Attributes: security_policy (str): + This field is a member of `oneof`_ ``_security_policy``. """ security_policy = proto.Field(proto.STRING, number=171082513, optional=True,) @@ -40012,20 +45765,30 @@ class SecurityPolicyRule(proto.Message): connection triggers the rule. Can currently be either "allow" or "deny()" where valid values for status are 403, 404, and 502. + + This field is a member of `oneof`_ ``_action``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. kind (str): [Output only] Type of the resource. 
Always compute#securityPolicyRule for security policy rules + + This field is a member of `oneof`_ ``_kind``. match (google.cloud.compute_v1.types.SecurityPolicyRuleMatcher): A match condition that incoming traffic is evaluated against. If it evaluates to true, the corresponding 'action' is enforced. + + This field is a member of `oneof`_ ``_match``. preview (bool): If set to true, the specified action is not enforced. + + This field is a member of `oneof`_ ``_preview``. priority (int): An integer indicating the priority of a rule in the list. The priority must be a positive @@ -40033,6 +45796,8 @@ class SecurityPolicyRule(proto.Message): evaluated from highest to lowest priority where 0 is the highest priority and 2147483647 is the lowest priority. + + This field is a member of `oneof`_ ``_priority``. """ action = proto.Field(proto.STRING, number=187661878, optional=True,) @@ -40058,16 +45823,22 @@ class SecurityPolicyRuleMatcher(proto.Message): versioned_expr. This field must be specified if versioned_expr is specified and cannot be specified if versioned_expr is not specified. + + This field is a member of `oneof`_ ``_config``. expr (google.cloud.compute_v1.types.Expr): User defined CEVAL expression. A CEVAL expression is used to specify match criteria such as origin.ip, source.region_code and contents in the request header. + + This field is a member of `oneof`_ ``_expr``. versioned_expr (google.cloud.compute_v1.types.SecurityPolicyRuleMatcher.VersionedExpr): Preconfigured versioned expression. If this field is specified, config must also be specified. Available preconfigured expressions along with their requirements are: SRC_IPS_V1 - must specify the corresponding src_ip_range field in config. + + This field is a member of `oneof`_ ``_versioned_expr``. """ class VersionedExpr(proto.Enum): @@ -40093,6 +45864,7 @@ class VersionedExpr(proto.Enum): class SecurityPolicyRuleMatcherConfig(proto.Message): r""" + Attributes: src_ip_ranges (Sequence[str]): CIDR IP address range. Maximum number of src_ip_ranges @@ -40115,6 +45887,8 @@ class SecuritySettings(proto.Message): the loadBalancingScheme set to INTERNAL_SELF_MANAGED. If left blank, communications are not encrypted. Note: This field currently has no impact. + + This field is a member of `oneof`_ ``_client_tls_policy``. subject_alt_names (Sequence[str]): Optional. A list of Subject Alternative Names (SANs) that the client verifies during a mutual TLS handshake with an @@ -40159,24 +45933,34 @@ class SendDiagnosticInterruptInstanceRequest(proto.Message): class SendDiagnosticInterruptInstanceResponse(proto.Message): r"""A response message for Instances.SendDiagnosticInterrupt. See the method description for details. - """ + + """ class SerialPortOutput(proto.Message): r"""An instance serial console output. + Attributes: contents (str): [Output Only] The contents of the console output. + + This field is a member of `oneof`_ ``_contents``. kind (str): [Output Only] Type of the resource. Always compute#serialPortOutput for serial port output. + + This field is a member of `oneof`_ ``_kind``. next_ (int): [Output Only] The position of the next byte of content, regardless of whether the content exists, following the output returned in the ``contents`` property. Use this value in the next request as the start parameter. + + This field is a member of `oneof`_ ``_next``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. 
start (int): The starting byte position of the output that was returned. This should match the start parameter sent with the request. @@ -40185,6 +45969,8 @@ class SerialPortOutput(proto.Message): output start value will indicate the byte position of the output that was returned, which might be different than the ``start`` value that was specified in the request. + + This field is a member of `oneof`_ ``_start``. """ contents = proto.Field(proto.STRING, number=506419994, optional=True,) @@ -40196,9 +45982,11 @@ class SerialPortOutput(proto.Message): class ServerBinding(proto.Message): r""" + Attributes: type_ (google.cloud.compute_v1.types.ServerBinding.Type): + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -40213,9 +46001,12 @@ class Type(proto.Enum): class ServiceAccount(proto.Message): r"""A service account. + Attributes: email (str): Email address of the service account. + + This field is a member of `oneof`_ ``_email``. scopes (Sequence[str]): The list of scopes to be made available for this service account. @@ -40241,6 +46032,8 @@ class ServiceAttachment(proto.Message): can be set to ACCEPT_AUTOMATIC. An ACCEPT_AUTOMATIC service attachment is one that always accepts the connection from consumer forwarding rules. + + This field is a member of `oneof`_ ``_connection_preference``. consumer_accept_lists (Sequence[google.cloud.compute_v1.types.ServiceAttachmentConsumerProjectLimit]): Projects that are allowed to connect to this service attachment. @@ -40250,15 +46043,21 @@ class ServiceAttachment(proto.Message): specified using its id or number. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. enable_proxy_protocol (bool): If true, enable the proxy protocol which is for supplying client TCP/IP address data in TCP connections that traverse proxies on their way to destination servers. + + This field is a member of `oneof`_ ``_enable_proxy_protocol``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -40270,12 +46069,18 @@ class ServiceAttachment(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the ServiceAttachment. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource type. The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#serviceAttachment for service attachments. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -40285,6 +46090,8 @@ class ServiceAttachment(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. nat_subnets (Sequence[str]): An array of URLs where each entry is the URL of a subnet provided by the service producer to @@ -40293,19 +46100,29 @@ class ServiceAttachment(proto.Message): The URL of a forwarding rule with loadBalancingScheme INTERNAL\* that is serving the endpoint identified by this service attachment. 
+ + This field is a member of `oneof`_ ``_producer_forwarding_rule``. psc_service_attachment_id (google.cloud.compute_v1.types.Uint128): [Output Only] An 128-bit global unique ID of the PSC service attachment. + + This field is a member of `oneof`_ ``_psc_service_attachment_id``. region (str): [Output Only] URL of the region where the service attachment resides. This field applies only to the region resource. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. target_service (str): The URL of a service serving the endpoint identified by this service attachment. + + This field is a member of `oneof`_ ``_target_service``. """ class ConnectionPreference(proto.Enum): @@ -40352,15 +46169,20 @@ class ConnectionPreference(proto.Enum): class ServiceAttachmentAggregatedList(proto.Message): r"""Contains a list of ServiceAttachmentsScopedList. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ServiceAttachmentAggregatedList.ItemsEntry]): A list of ServiceAttachmentsScopedList resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -40368,12 +46190,18 @@ class ServiceAttachmentAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -40398,15 +46226,22 @@ def raw_page(self): class ServiceAttachmentConnectedEndpoint(proto.Message): r"""[Output Only] A connection connected to this service attachment. + Attributes: endpoint (str): The url of a connected endpoint. + + This field is a member of `oneof`_ ``_endpoint``. psc_connection_id (int): The PSC connection id of the connected endpoint. + + This field is a member of `oneof`_ ``_psc_connection_id``. status (google.cloud.compute_v1.types.ServiceAttachmentConnectedEndpoint.Status): The status of a connected endpoint to this service attachment. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -40427,12 +46262,17 @@ class Status(proto.Enum): class ServiceAttachmentConsumerProjectLimit(proto.Message): r""" + Attributes: connection_limit (int): The value of the limit to set. + + This field is a member of `oneof`_ ``_connection_limit``. project_id_or_num (str): The project id or number for the project to set the limit for. + + This field is a member of `oneof`_ ``_project_id_or_num``. 
""" connection_limit = proto.Field(proto.UINT32, number=131403546, optional=True,) @@ -40441,15 +46281,20 @@ class ServiceAttachmentConsumerProjectLimit(proto.Message): class ServiceAttachmentList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.ServiceAttachment]): A list of ServiceAttachment resources. kind (str): [Output Only] Type of the resource. Always compute#serviceAttachment for service attachments. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -40457,10 +46302,16 @@ class ServiceAttachmentList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -40481,6 +46332,7 @@ def raw_page(self): class ServiceAttachmentsScopedList(proto.Message): r""" + Attributes: service_attachments (Sequence[google.cloud.compute_v1.types.ServiceAttachment]): A list of ServiceAttachments contained in @@ -40488,6 +46340,8 @@ class ServiceAttachmentsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of service attachments when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ service_attachments = proto.RepeatedField( @@ -40521,6 +46375,8 @@ class SetBackendServiceTargetSslProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_ssl_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetBackendServiceRequest): The body resource for this request target_ssl_proxy (str): @@ -40561,6 +46417,8 @@ class SetBackendServiceTargetTcpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_tcp_proxies_set_backend_service_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetBackendServiceRequest): The body resource for this request target_tcp_proxy (str): @@ -40585,6 +46443,8 @@ class SetBackupTargetPoolRequest(proto.Message): Attributes: failover_ratio (float): New failoverRatio value for the target pool. + + This field is a member of `oneof`_ ``_failover_ratio``. project (str): Project ID for this request. region (str): @@ -40605,6 +46465,8 @@ class SetBackupTargetPoolRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_pool (str): Name of the TargetPool resource to set a backup pool for. 
@@ -40647,6 +46509,8 @@ class SetCommonInstanceMetadataProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ metadata_resource = proto.Field( @@ -40681,6 +46545,8 @@ class SetDefaultNetworkTierProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ project = proto.Field(proto.STRING, number=227560217,) @@ -40698,6 +46564,8 @@ class SetDeletionProtectionInstanceRequest(proto.Message): deletion_protection (bool): Whether the resource should be protected against deletion. + + This field is a member of `oneof`_ ``_deletion_protection``. project (str): Project ID for this request. request_id (str): @@ -40716,6 +46584,8 @@ class SetDeletionProtectionInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. resource (str): Name or id of the resource for this request. zone (str): @@ -40761,6 +46631,8 @@ class SetDiskAutoDeleteInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -41105,6 +46977,8 @@ class SetInstanceTemplateInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. @@ -41151,6 +47025,8 @@ class SetInstanceTemplateRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -41187,6 +47063,8 @@ class SetLabelsDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. resource (str): Name or id of the resource for this request. zone (str): @@ -41251,6 +47129,8 @@ class SetLabelsForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. resource (str): Name or id of the resource for this request. """ @@ -41331,6 +47211,8 @@ class SetLabelsInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -41371,6 +47253,8 @@ class SetLabelsRegionDiskRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. resource (str): Name or id of the resource for this request. 
""" @@ -41431,6 +47315,8 @@ class SetLabelsVpnGatewayRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. resource (str): Name or id of the resource for this request. """ @@ -41471,6 +47357,8 @@ class SetMachineResourcesInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -41511,6 +47399,8 @@ class SetMachineTypeInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -41551,6 +47441,8 @@ class SetMetadataInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -41591,6 +47483,8 @@ class SetMinCpuPlatformInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -41632,6 +47526,8 @@ class SetNamedPortsInstanceGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the instance group is located. @@ -41676,6 +47572,8 @@ class SetNamedPortsRegionInstanceGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group = proto.Field(proto.STRING, number=81095253,) @@ -41716,6 +47614,8 @@ class SetNodeTemplateNodeGroupRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -41754,6 +47654,8 @@ class SetPrivateIpGoogleAccessSubnetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. subnetwork (str): Name of the Subnetwork resource. subnetworks_set_private_ip_google_access_request_resource (google.cloud.compute_v1.types.SubnetworksSetPrivateIpGoogleAccessRequest): @@ -41794,6 +47696,8 @@ class SetProxyHeaderTargetSslProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_ssl_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetProxyHeaderRequest): The body resource for this request target_ssl_proxy (str): @@ -41834,6 +47738,8 @@ class SetProxyHeaderTargetTcpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). 
+ + This field is a member of `oneof`_ ``_request_id``. target_tcp_proxies_set_proxy_header_request_resource (google.cloud.compute_v1.types.TargetTcpProxiesSetProxyHeaderRequest): The body resource for this request target_tcp_proxy (str): @@ -41874,6 +47780,8 @@ class SetQuicOverrideTargetHttpsProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_https_proxies_set_quic_override_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetQuicOverrideRequest): The body resource for this request target_https_proxy (str): @@ -41917,6 +47825,8 @@ class SetSchedulingInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. scheduling_resource (google.cloud.compute_v1.types.Scheduling): The body resource for this request zone (str): @@ -41959,6 +47869,8 @@ class SetSecurityPolicyBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. security_policy_reference_resource (google.cloud.compute_v1.types.SecurityPolicyReference): The body resource for this request """ @@ -41998,6 +47910,8 @@ class SetServiceAccountInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -42038,6 +47952,8 @@ class SetShieldedInstanceIntegrityPolicyInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. shielded_instance_integrity_policy_resource (google.cloud.compute_v1.types.ShieldedInstanceIntegrityPolicy): The body resource for this request zone (str): @@ -42079,6 +47995,8 @@ class SetSslCertificatesRegionTargetHttpsProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_https_proxy (str): Name of the TargetHttpsProxy resource to set an SslCertificates resource for. @@ -42118,6 +48036,8 @@ class SetSslCertificatesTargetHttpsProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_https_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetHttpsProxiesSetSslCertificatesRequest): The body resource for this request target_https_proxy (str): @@ -42158,6 +48078,8 @@ class SetSslCertificatesTargetSslProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
target_ssl_proxies_set_ssl_certificates_request_resource (google.cloud.compute_v1.types.TargetSslProxiesSetSslCertificatesRequest): The body resource for this request target_ssl_proxy (str): @@ -42198,6 +48120,8 @@ class SetSslPolicyTargetHttpsProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): The body resource for this request target_https_proxy (str): @@ -42237,6 +48161,8 @@ class SetSslPolicyTargetSslProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. ssl_policy_reference_resource (google.cloud.compute_v1.types.SslPolicyReference): The body resource for this request target_ssl_proxy (str): @@ -42278,6 +48204,8 @@ class SetTagsInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. tags_resource (google.cloud.compute_v1.types.Tags): The body resource for this request zone (str): @@ -42319,6 +48247,8 @@ class SetTargetForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_reference_resource (google.cloud.compute_v1.types.TargetReference): The body resource for this request """ @@ -42358,6 +48288,8 @@ class SetTargetGlobalForwardingRuleRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_reference_resource (google.cloud.compute_v1.types.TargetReference): The body resource for this request """ @@ -42397,6 +48329,8 @@ class SetTargetPoolsInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. @@ -42443,6 +48377,8 @@ class SetTargetPoolsRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -42481,6 +48417,8 @@ class SetUrlMapRegionTargetHttpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_http_proxy (str): Name of the TargetHttpProxy to set a URL map for. @@ -42520,6 +48458,8 @@ class SetUrlMapRegionTargetHttpsProxyRequest(proto.Message): exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). end_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. target_https_proxy (str): Name of the TargetHttpsProxy to set a URL map for. 
@@ -42559,6 +48499,8 @@ class SetUrlMapTargetHttpProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_http_proxy (str): Name of the TargetHttpProxy to set a URL map for. @@ -42597,6 +48539,8 @@ class SetUrlMapTargetHttpsProxyRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. target_https_proxy (str): Name of the TargetHttpsProxy resource whose URL map is to be set. @@ -42635,6 +48579,8 @@ class SetUsageExportBucketProjectRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. usage_export_location_resource (google.cloud.compute_v1.types.UsageExportLocation): The body resource for this request """ @@ -42648,16 +48594,23 @@ class SetUsageExportBucketProjectRequest(proto.Message): class ShieldedInstanceConfig(proto.Message): r"""A set of Shielded Instance options. + Attributes: enable_integrity_monitoring (bool): Defines whether the instance has integrity monitoring enabled. Enabled by default. + + This field is a member of `oneof`_ ``_enable_integrity_monitoring``. enable_secure_boot (bool): Defines whether the instance has Secure Boot enabled. Disabled by default. + + This field is a member of `oneof`_ ``_enable_secure_boot``. enable_vtpm (bool): Defines whether the instance has the vTPM enabled. Enabled by default. + + This field is a member of `oneof`_ ``_enable_vtpm``. """ enable_integrity_monitoring = proto.Field( @@ -42669,19 +48622,26 @@ class ShieldedInstanceConfig(proto.Message): class ShieldedInstanceIdentity(proto.Message): r"""A Shielded Instance Identity. + Attributes: encryption_key (google.cloud.compute_v1.types.ShieldedInstanceIdentityEntry): An Endorsement Key (EK) made by the RSA 2048 algorithm issued to the Shielded Instance's vTPM. + + This field is a member of `oneof`_ ``_encryption_key``. kind (str): [Output Only] Type of the resource. Always compute#shieldedInstanceIdentity for shielded Instance identity entry. + + This field is a member of `oneof`_ ``_kind``. signing_key (google.cloud.compute_v1.types.ShieldedInstanceIdentityEntry): An Attestation Key (AK) made by the RSA 2048 algorithm issued to the Shielded Instance's vTPM. + + This field is a member of `oneof`_ ``_signing_key``. """ encryption_key = proto.Field( @@ -42701,12 +48661,17 @@ class ShieldedInstanceIdentity(proto.Message): class ShieldedInstanceIdentityEntry(proto.Message): r"""A Shielded Instance Identity Entry. + Attributes: ek_cert (str): A PEM-encoded X.509 certificate. This field can be empty. + + This field is a member of `oneof`_ ``_ek_cert``. ek_pub (str): A PEM-encoded public key. + + This field is a member of `oneof`_ ``_ek_pub``. """ ek_cert = proto.Field(proto.STRING, number=450178589, optional=True,) @@ -42722,6 +48687,8 @@ class ShieldedInstanceIntegrityPolicy(proto.Message): Updates the integrity policy baseline using the measurements from the VM instance's most recent boot. + + This field is a member of `oneof`_ ``_update_auto_learn_policy``. 
""" update_auto_learn_policy = proto.Field(proto.BOOL, number=245490215, optional=True,) @@ -42740,10 +48707,14 @@ class SignedUrlKey(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_key_name``. key_value (str): 128-bit key value used for signing the URL. The key value must be a valid RFC 4648 Section 5 base64url encoded string. + + This field is a member of `oneof`_ ``_key_value``. """ key_name = proto.Field(proto.STRING, number=500938859, optional=True,) @@ -42777,6 +48748,8 @@ class Snapshot(proto.Message): auto_created (bool): [Output Only] Set to true if snapshots are automatically created by applying resource policy on the target disk. + + This field is a member of `oneof`_ ``_auto_created``. chain_name (str): Creates the new snapshot in the snapshot chain labeled with the specified name. The chain @@ -42787,23 +48760,37 @@ class Snapshot(proto.Message): chargeback tracking. When you describe your snapshot resource, this field is visible only if it has a non-empty value. + + This field is a member of `oneof`_ ``_chain_name``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. disk_size_gb (int): [Output Only] Size of the source disk, specified in GB. + + This field is a member of `oneof`_ ``_disk_size_gb``. download_bytes (int): [Output Only] Number of bytes downloaded to restore a snapshot to a disk. + + This field is a member of `oneof`_ ``_download_bytes``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#snapshot for Snapshot resources. + + This field is a member of `oneof`_ ``_kind``. label_fingerprint (str): A fingerprint for the labels being applied to this snapshot, which is essentially a hash of @@ -42816,6 +48803,8 @@ class Snapshot(proto.Message): with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a snapshot. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.Snapshot.LabelsEntry]): Labels to apply to this snapshot. These can be later modified by the setLabels method. Label @@ -42832,6 +48821,8 @@ class Snapshot(proto.Message): snapshot close to other resources. This field is for use by internal tools that use the public API. + + This field is a member of `oneof`_ ``_location_hint``. name (str): Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, @@ -42841,10 +48832,16 @@ class Snapshot(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. satisfies_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. 
snapshot_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): Encrypts the snapshot using a customer- upplied encryption key. After you encrypt a @@ -42860,30 +48857,44 @@ class Snapshot(proto.Message): using an automatically generated key and you do not need to provide a key to use the snapshot later. + + This field is a member of `oneof`_ ``_snapshot_encryption_key``. source_disk (str): The source disk used to create this snapshot. + + This field is a member of `oneof`_ ``_source_disk``. source_disk_encryption_key (google.cloud.compute_v1.types.CustomerEncryptionKey): The customer-supplied encryption key of the source disk. Required if the source disk is protected by a customer-supplied encryption key. + + This field is a member of `oneof`_ ``_source_disk_encryption_key``. source_disk_id (str): [Output Only] The ID value of the disk used to create this snapshot. This value may be used to determine whether the snapshot was taken from the current or a previous instance of a given disk name. + + This field is a member of `oneof`_ ``_source_disk_id``. status (google.cloud.compute_v1.types.Snapshot.Status): [Output Only] The status of the snapshot. This can be CREATING, DELETING, FAILED, READY, or UPLOADING. + + This field is a member of `oneof`_ ``_status``. storage_bytes (int): [Output Only] A size of the storage used by the snapshot. As snapshots share storage, this number is expected to change with snapshot creation/deletion. + + This field is a member of `oneof`_ ``_storage_bytes``. storage_bytes_status (google.cloud.compute_v1.types.Snapshot.StorageBytesStatus): [Output Only] An indicator whether storageBytes is in a stable state or it is being adjusted as a result of shared storage reallocation. This status can either be UPDATING, meaning the size of the snapshot is being updated, or UP_TO_DATE, meaning the size of the snapshot is up-to-date. + + This field is a member of `oneof`_ ``_storage_bytes_status``. storage_locations (Sequence[str]): Cloud Storage bucket storage location of the snapshot (regional or multi-regional). @@ -42945,14 +48956,19 @@ class StorageBytesStatus(proto.Enum): class SnapshotList(proto.Message): r"""Contains a list of Snapshot resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Snapshot]): A list of Snapshot resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -42960,10 +48976,16 @@ class SnapshotList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -43021,23 +49043,37 @@ class SslCertificate(proto.Message): format. The certificate chain must be no greater than 5 certs long. The chain must include at least one intermediate cert. + + This field is a member of `oneof`_ ``_certificate``. 
creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. expire_time (str): [Output Only] Expire time of the certificate. RFC3339 + + This field is a member of `oneof`_ ``_expire_time``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#sslCertificate for SSL certificates. + + This field is a member of `oneof`_ ``_kind``. managed (google.cloud.compute_v1.types.SslCertificateManagedSslCertificate): Configuration and status of a managed SSL certificate. + + This field is a member of `oneof`_ ``_managed``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -43047,20 +49083,30 @@ class SslCertificate(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. private_key (str): A value read into memory from a write-only private key file. The private key file must be in PEM format. For security, only insert requests include this field. + + This field is a member of `oneof`_ ``_private_key``. region (str): [Output Only] URL of the region where the regional SSL Certificate resides. This field is not applicable to global SSL Certificate. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. self_managed (google.cloud.compute_v1.types.SslCertificateSelfManagedSslCertificate): Configuration and status of a self-managed SSL certificate. + + This field is a member of `oneof`_ ``_self_managed``. subject_alternative_names (Sequence[str]): [Output Only] Domains associated with the certificate via Subject Alternative Name. @@ -43069,6 +49115,8 @@ class SslCertificate(proto.Message): "SELF_MANAGED" or "MANAGED". If not specified, the certificate is self-managed and the fields certificate and private_key are used. + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -43109,10 +49157,13 @@ class Type(proto.Enum): class SslCertificateAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.SslCertificateAggregatedList.ItemsEntry]): A list of SslCertificatesScopedList resources. @@ -43120,6 +49171,8 @@ class SslCertificateAggregatedList(proto.Message): [Output Only] Type of resource. Always compute#sslCertificateAggregatedList for lists of SSL Certificates. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -43127,12 +49180,18 @@ class SslCertificateAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. 
self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -43157,14 +49216,19 @@ def raw_page(self): class SslCertificateList(proto.Message): r"""Contains a list of SslCertificate resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.SslCertificate]): A list of SslCertificate resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -43172,10 +49236,16 @@ class SslCertificateList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -43196,6 +49266,7 @@ def raw_page(self): class SslCertificateManagedSslCertificate(proto.Message): r"""Configuration and status of a managed SSL certificate. + Attributes: domain_status (Sequence[google.cloud.compute_v1.types.SslCertificateManagedSslCertificate.DomainStatusEntry]): [Output only] Detailed statuses of the domains specified for @@ -43207,6 +49278,8 @@ class SslCertificateManagedSslCertificate(proto.Message): certificate `__. status (google.cloud.compute_v1.types.SslCertificateManagedSslCertificate.Status): [Output only] Status of the managed certificate resource. + + This field is a member of `oneof`_ ``_status``. """ class Status(proto.Enum): @@ -43226,15 +49299,20 @@ class Status(proto.Enum): class SslCertificateSelfManagedSslCertificate(proto.Message): r"""Configuration and status of a self-managed SSL certificate. + Attributes: certificate (str): A local certificate file. The certificate must be in PEM format. The certificate chain must be no greater than 5 certs long. The chain must include at least one intermediate cert. + + This field is a member of `oneof`_ ``_certificate``. private_key (str): A write-only private key in PEM format. Only insert requests will include this field. + + This field is a member of `oneof`_ ``_private_key``. """ certificate = proto.Field(proto.STRING, number=341787031, optional=True,) @@ -43243,6 +49321,7 @@ class SslCertificateSelfManagedSslCertificate(proto.Message): class SslCertificatesScopedList(proto.Message): r""" + Attributes: ssl_certificates (Sequence[google.cloud.compute_v1.types.SslCertificate]): List of SslCertificates contained in this @@ -43250,6 +49329,8 @@ class SslCertificatesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. 
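The ``SslCertificateSelfManagedSslCertificate`` message above pairs a PEM certificate chain with a write-only private key that only insert requests carry. A small, hypothetical sketch of embedding it in an ``SslCertificate``; the PEM strings are placeholders, and the ``name`` field comes from the ``SslCertificate`` message documented earlier:

```python
from google.cloud.compute_v1.types import compute

# Placeholder PEM blobs; per the docstrings, private_key is write-only and
# is only included in insert requests.
ssl_cert = compute.SslCertificate(
    name="example-self-managed-cert",
    self_managed=compute.SslCertificateSelfManagedSslCertificate(
        certificate="-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n",
        private_key="-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
    ),
)
```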
""" ssl_certificates = proto.RepeatedField( @@ -43262,15 +49343,20 @@ class SslCertificatesScopedList(proto.Message): class SslPoliciesList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.SslPolicy]): A list of SslPolicy resources. kind (str): [Output Only] Type of the resource. Always compute#sslPoliciesList for lists of sslPolicies. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -43278,10 +49364,16 @@ class SslPoliciesList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -43300,6 +49392,7 @@ def raw_page(self): class SslPoliciesListAvailableFeaturesResponse(proto.Message): r""" + Attributes: features (Sequence[str]): @@ -43317,6 +49410,8 @@ class SslPolicy(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. custom_features (Sequence[str]): A list of features enabled when the selected profile is CUSTOM. The method returns the set of @@ -43327,6 +49422,8 @@ class SslPolicy(proto.Message): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. enabled_features (Sequence[str]): [Output Only] The list of features enabled in the SSL policy. @@ -43340,16 +49437,24 @@ class SslPolicy(proto.Message): fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve an SslPolicy. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output only] Type of the resource. Always compute#sslPolicyfor SSL policies. + + This field is a member of `oneof`_ ``_kind``. min_tls_version (google.cloud.compute_v1.types.SslPolicy.MinTlsVersion): The minimum version of SSL protocol that can be used by the clients to establish a connection with the load balancer. This can be one of TLS_1_0, TLS_1_1, TLS_1_2. + + This field is a member of `oneof`_ ``_min_tls_version``. name (str): Name of the resource. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 @@ -43358,6 +49463,8 @@ class SslPolicy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. profile (google.cloud.compute_v1.types.SslPolicy.Profile): Profile specifies the set of SSL features that can be used by the load balancer when @@ -43365,8 +49472,12 @@ class SslPolicy(proto.Message): COMPATIBLE, MODERN, RESTRICTED, or CUSTOM. 
If using CUSTOM, the set of SSL features to enable must be specified in the customFeatures field. + + This field is a member of `oneof`_ ``_profile``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. warnings (Sequence[google.cloud.compute_v1.types.Warnings]): [Output Only] If potential misconfigurations are detected for this SSL policy, this field will be populated with @@ -43414,11 +49525,14 @@ class Profile(proto.Enum): class SslPolicyReference(proto.Message): r""" + Attributes: ssl_policy (str): URL of the SSL policy resource. Set this to empty string to clear any existing SSL policy associated with the target proxy resource. + + This field is a member of `oneof`_ ``_ssl_policy``. """ ssl_policy = proto.Field(proto.STRING, number=295190213, optional=True,) @@ -43449,6 +49563,8 @@ class StartInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -43486,6 +49602,8 @@ class StartWithEncryptionKeyInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -43503,9 +49621,11 @@ class StartWithEncryptionKeyInstanceRequest(proto.Message): class StatefulPolicy(proto.Message): r""" + Attributes: preserved_state (google.cloud.compute_v1.types.StatefulPolicyPreservedState): + This field is a member of `oneof`_ ``_preserved_state``. """ preserved_state = proto.Field( @@ -43518,6 +49638,7 @@ class StatefulPolicy(proto.Message): class StatefulPolicyPreservedState(proto.Message): r"""Configuration of preserved resources. + Attributes: disks (Sequence[google.cloud.compute_v1.types.StatefulPolicyPreservedState.DisksEntry]): Disks created on the instances that will be @@ -43535,6 +49656,7 @@ class StatefulPolicyPreservedState(proto.Message): class StatefulPolicyPreservedStateDiskDevice(proto.Message): r""" + Attributes: auto_delete (google.cloud.compute_v1.types.StatefulPolicyPreservedStateDiskDevice.AutoDelete): These stateful disks will never be deleted during @@ -43543,6 +49665,8 @@ class StatefulPolicyPreservedStateDiskDevice(proto.Message): after it is no longer used by the group, e.g. when the given instance or the whole group is deleted. Note: disks attached in READ_ONLY mode cannot be auto-deleted. + + This field is a member of `oneof`_ ``_auto_delete``. """ class AutoDelete(proto.Enum): @@ -43587,6 +49711,8 @@ class StopInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -43607,20 +49733,28 @@ class Subnetwork(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. This field can be set only at resource creation time. + + This field is a member of `oneof`_ ``_description``. enable_flow_logs (bool): Whether to enable flow logging for this subnetwork. 
If this field is not explicitly set, it will not appear in get listings. If not set the default behavior is to disable flow logging. This field isn't supported with the purpose field set to INTERNAL_HTTPS_LOAD_BALANCER. + + This field is a member of `oneof`_ ``_enable_flow_logs``. external_ipv6_prefix (str): [Output Only] The range of external IPv6 addresses that are owned by this subnetwork. + + This field is a member of `oneof`_ ``_external_ipv6_prefix``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -43631,12 +49765,18 @@ class Subnetwork(proto.Message): will fail with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a Subnetwork. + + This field is a member of `oneof`_ ``_fingerprint``. gateway_address (str): [Output Only] The gateway address for default routes to reach destination addresses outside this subnetwork. + + This field is a member of `oneof`_ ``_gateway_address``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. ip_cidr_range (str): The range of internal addresses that are owned by this subnetwork. Provide this property @@ -43648,22 +49788,32 @@ class Subnetwork(proto.Message): range listed in the Valid ranges list. The range can be expanded after creation using expandIpCidrRange. + + This field is a member of `oneof`_ ``_ip_cidr_range``. ipv6_access_type (google.cloud.compute_v1.types.Subnetwork.Ipv6AccessType): The access type of IPv6 address this subnet holds. It's immutable and can only be specified during creation or the first time the subnet is updated into IPV4_IPV6 dual stack. If the ipv6_type is EXTERNAL then this subnet cannot enable direct path. + + This field is a member of `oneof`_ ``_ipv6_access_type``. ipv6_cidr_range (str): [Output Only] The range of internal IPv6 addresses that are owned by this subnetwork. + + This field is a member of `oneof`_ ``_ipv6_cidr_range``. kind (str): [Output Only] Type of the resource. Always compute#subnetwork for Subnetwork resources. + + This field is a member of `oneof`_ ``_kind``. log_config (google.cloud.compute_v1.types.SubnetworkLogConfig): This field denotes the VPC flow logging options for this subnetwork. If logging is enabled, logs are exported to Cloud Logging. + + This field is a member of `oneof`_ ``_log_config``. name (str): The name of the resource, provided by the client when initially creating the resource. The name must be 1-63 @@ -43673,17 +49823,23 @@ class Subnetwork(proto.Message): first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network (str): The URL of the network to which this subnetwork belongs, provided by the client when initially creating the subnetwork. This field can be set only at resource creation time. + + This field is a member of `oneof`_ ``_network``. private_ip_google_access (bool): Whether the VMs in this subnet can access Google services without assigned external IP addresses. This field can be both set at resource creation time and updated using setPrivateIpGoogleAccess. + + This field is a member of `oneof`_ ``_private_ip_google_access``. private_ipv6_google_access (google.cloud.compute_v1.types.Subnetwork.PrivateIpv6GoogleAccess): The private IPv6 google access type for the VMs in this subnet. 
This is an expanded field of @@ -43691,6 +49847,8 @@ class Subnetwork(proto.Message): privateIpv6GoogleAccess will take priority. This field can be both set at resource creation time and updated using patch. + + This field is a member of `oneof`_ ``_private_ipv6_google_access``. purpose (google.cloud.compute_v1.types.Subnetwork.Purpose): The purpose of the resource. This field can be either PRIVATE_RFC_1918 or INTERNAL_HTTPS_LOAD_BALANCER. A @@ -43700,10 +49858,14 @@ class Subnetwork(proto.Message): to PRIVATE_RFC_1918. The enableFlowLogs field isn't supported with the purpose field set to INTERNAL_HTTPS_LOAD_BALANCER. + + This field is a member of `oneof`_ ``_purpose``. region (str): URL of the region where the Subnetwork resides. This field can be set only at resource creation time. + + This field is a member of `oneof`_ ``_region``. role (google.cloud.compute_v1.types.Subnetwork.Role): The role of subnetwork. Currently, this field is only used when purpose = INTERNAL_HTTPS_LOAD_BALANCER. The value can @@ -43712,6 +49874,8 @@ class Subnetwork(proto.Message): A BACKUP subnetwork is one that is ready to be promoted to ACTIVE or is currently draining. This field can be updated with a patch request. + + This field is a member of `oneof`_ ``_role``. secondary_ip_ranges (Sequence[google.cloud.compute_v1.types.SubnetworkSecondaryRange]): An array of configurations for secondary IP ranges for VM instances contained in this @@ -43722,11 +49886,15 @@ class Subnetwork(proto.Message): updated with a patch request. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. stack_type (google.cloud.compute_v1.types.Subnetwork.StackType): The stack type for this subnet to identify whether the IPv6 feature is enabled or not. If not specified IPV4_ONLY will be used. This field can be both set at resource creation time and updated using patch. + + This field is a member of `oneof`_ ``_stack_type``. state (google.cloud.compute_v1.types.Subnetwork.State): [Output Only] The state of the subnetwork, which can be one of the following values: READY: Subnetwork is created and @@ -43735,6 +49903,8 @@ class Subnetwork(proto.Message): indicates that connections to the load balancer are being drained. A subnetwork that is draining cannot be used or modified until it reaches a status of READY + + This field is a member of `oneof`_ ``_state``. """ class Ipv6AccessType(proto.Enum): @@ -43846,16 +50016,21 @@ class State(proto.Enum): class SubnetworkAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.SubnetworkAggregatedList.ItemsEntry]): A list of SubnetworksScopedList resources. kind (str): [Output Only] Type of resource. Always compute#subnetworkAggregatedList for aggregated lists of subnetworks. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -43863,12 +50038,18 @@ class SubnetworkAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. 
+ + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -43890,15 +50071,20 @@ def raw_page(self): class SubnetworkList(proto.Message): r"""Contains a list of Subnetwork resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Subnetwork]): A list of Subnetwork resources. kind (str): [Output Only] Type of resource. Always compute#subnetworkList for lists of subnetworks. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -43906,10 +50092,16 @@ class SubnetworkList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -43928,6 +50120,7 @@ def raw_page(self): class SubnetworkLogConfig(proto.Message): r"""The available logging options for this subnetwork. + Attributes: aggregation_interval (google.cloud.compute_v1.types.SubnetworkLogConfig.AggregationInterval): Can only be specified if VPC flow logging for @@ -43937,15 +50130,21 @@ class SubnetworkLogConfig(proto.Message): amount of generated flow logs for long lasting connections. Default is an interval of 5 seconds per connection. + + This field is a member of `oneof`_ ``_aggregation_interval``. enable (bool): Whether to enable flow logging for this subnetwork. If this field is not explicitly set, it will not appear in get listings. If not set the default behavior is to disable flow logging. + + This field is a member of `oneof`_ ``_enable``. filter_expr (str): Can only be specified if VPC flow logs for this subnetwork is enabled. Export filter used to define which VPC flow logs should be logged. + + This field is a member of `oneof`_ ``_filter_expr``. flow_sampling (float): Can only be specified if VPC flow logging for this subnetwork is enabled. The value of the field must be in [0, @@ -43953,11 +50152,15 @@ class SubnetworkLogConfig(proto.Message): subnetwork where 1.0 means all collected logs are reported and 0.0 means no logs are reported. Default is 0.5, which means half of all collected logs are reported. + + This field is a member of `oneof`_ ``_flow_sampling``. metadata (google.cloud.compute_v1.types.SubnetworkLogConfig.Metadata): Can only be specified if VPC flow logs for this subnetwork is enabled. Configures whether all, none or a subset of metadata fields should be added to the reported VPC flow logs. Default is EXCLUDE_ALL_METADATA. + + This field is a member of `oneof`_ ``_metadata``. metadata_fields (Sequence[str]): Can only be specified if VPC flow logs for this subnetwork is enabled and "metadata" was set to CUSTOM_METADATA. 
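Taken together, the ``Subnetwork`` and ``SubnetworkLogConfig`` hunks above describe how VPC flow logging is switched on per subnetwork: ``log_config.enable`` turns it on, ``flow_sampling`` (default 0.5) controls the fraction of collected logs that are reported, and ``metadata`` controls which fields are attached. A minimal sketch of a subnetwork message with flow logs and a secondary range (the ``SubnetworkSecondaryRange`` message appears just below); every name and CIDR here is a placeholder:

```python
from google.cloud.compute_v1.types import compute

# Placeholder names and CIDRs; flow_sampling=0.5 mirrors the documented default.
subnetwork = compute.Subnetwork(
    name="example-subnet",
    network="global/networks/example-network",
    ip_cidr_range="10.0.0.0/24",
    private_ip_google_access=True,
    log_config=compute.SubnetworkLogConfig(
        enable=True,
        flow_sampling=0.5,
    ),
    secondary_ip_ranges=[
        compute.SubnetworkSecondaryRange(
            range_name="pods",
            ip_cidr_range="10.4.0.0/14",
        ),
    ],
)
```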
@@ -44001,6 +50204,7 @@ class Metadata(proto.Enum): class SubnetworkSecondaryRange(proto.Message): r"""Represents a secondary IP range of a subnetwork. + Attributes: ip_cidr_range (str): The range of IP addresses belonging to this @@ -44010,12 +50214,16 @@ class SubnetworkSecondaryRange(proto.Message): primary and secondary IP ranges within a network. Only IPv4 is supported. The range can be any range listed in the Valid ranges list. + + This field is a member of `oneof`_ ``_ip_cidr_range``. range_name (str): The name associated with this subnetwork secondary range, used when adding an alias IP range to a VM instance. The name must be 1-63 characters long, and comply with RFC1035. The name must be unique within the subnetwork. + + This field is a member of `oneof`_ ``_range_name``. """ ip_cidr_range = proto.Field(proto.STRING, number=98117322, optional=True,) @@ -44024,6 +50232,7 @@ class SubnetworkSecondaryRange(proto.Message): class SubnetworksExpandIpCidrRangeRequest(proto.Message): r""" + Attributes: ip_cidr_range (str): The IP (in CIDR format or netmask) of @@ -44033,6 +50242,8 @@ class SubnetworksExpandIpCidrRangeRequest(proto.Message): range can only be larger than (i.e. a superset of) the range previously defined before the update. + + This field is a member of `oneof`_ ``_ip_cidr_range``. """ ip_cidr_range = proto.Field(proto.STRING, number=98117322, optional=True,) @@ -44040,6 +50251,7 @@ class SubnetworksExpandIpCidrRangeRequest(proto.Message): class SubnetworksScopedList(proto.Message): r""" + Attributes: subnetworks (Sequence[google.cloud.compute_v1.types.Subnetwork]): A list of subnetworks contained in this @@ -44047,6 +50259,8 @@ class SubnetworksScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): An informational warning that appears when the list of addresses is empty. + + This field is a member of `oneof`_ ``_warning``. """ subnetworks = proto.RepeatedField( @@ -44059,9 +50273,11 @@ class SubnetworksScopedList(proto.Message): class SubnetworksSetPrivateIpGoogleAccessRequest(proto.Message): r""" + Attributes: private_ip_google_access (bool): + This field is a member of `oneof`_ ``_private_ip_google_access``. """ private_ip_google_access = proto.Field(proto.BOOL, number=421491790, optional=True,) @@ -44075,6 +50291,7 @@ class Subsetting(proto.Message): Attributes: policy (google.cloud.compute_v1.types.Subsetting.Policy): + This field is a member of `oneof`_ ``_policy``. """ class Policy(proto.Enum): @@ -44111,6 +50328,8 @@ class SwitchToCustomModeNetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network = proto.Field(proto.STRING, number=232872494,) @@ -44120,14 +50339,19 @@ class SwitchToCustomModeNetworkRequest(proto.Message): class TCPHealthCheck(proto.Message): r""" + Attributes: port (int): The TCP port number for the health check request. The default value is 80. Valid values are 1 through 65535. + + This field is a member of `oneof`_ ``_port``. port_name (str): Port name as defined in InstanceGroup#NamedPort#name. If both port and port_name are defined, port takes precedence. + + This field is a member of `oneof`_ ``_port_name``. 
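For ``TCPHealthCheck``, ``port`` and ``port_name`` are alternatives, with ``port`` taking precedence when both are set; ``request`` and ``response`` (documented just below) let the probe exchange a small ASCII payload instead of treating a bare TCP handshake as healthy. A minimal sketch with placeholder values:

```python
from google.cloud.compute_v1.types import compute

# Placeholder values; if port_name were also set, port would take precedence.
tcp_check = compute.TCPHealthCheck(
    port=80,
    request="PING",   # ASCII payload sent once the TCP connection is established
    response="PONG",  # expected prefix of the backend's reply
)
```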
port_specification (google.cloud.compute_v1.types.TCPHealthCheck.PortSpecification): Specifies how port is selected for health checking, can be one of following values: USE_FIXED_PORT: The port number in @@ -44138,10 +50362,14 @@ class TCPHealthCheck(proto.Message): the port or named port specified in the Backend Service is used for health checking. If not specified, TCP health check follows behavior specified in port and portName fields. + + This field is a member of `oneof`_ ``_port_specification``. proxy_header (google.cloud.compute_v1.types.TCPHealthCheck.ProxyHeader): Specifies the type of proxy header to append before sending data to the backend, either NONE or PROXY_V1. The default is NONE. + + This field is a member of `oneof`_ ``_proxy_header``. request (str): The application data to send once the TCP connection has been established (default value @@ -44149,11 +50377,15 @@ class TCPHealthCheck(proto.Message): empty, the connection establishment alone will indicate health. The request data can only be ASCII. + + This field is a member of `oneof`_ ``_request``. response (str): The bytes to match against the beginning of the response data. If left empty (the default value), any response will indicate health. The response data can only be ASCII. + + This field is a member of `oneof`_ ``_response``. """ class PortSpecification(proto.Enum): @@ -44193,6 +50425,7 @@ class ProxyHeader(proto.Enum): class Tags(proto.Message): r"""A set of instance tags. + Attributes: fingerprint (str): Specifies a fingerprint for this request, @@ -44204,6 +50437,8 @@ class Tags(proto.Message): to-date fingerprint hash in order to update or change tags. To see the latest fingerprint, make get() request to the instance. + + This field is a member of `oneof`_ ``_fingerprint``. items (Sequence[str]): An array of tags. Each tag must be 1-63 characters long, and comply with RFC1035. @@ -44224,10 +50459,14 @@ class TargetGrpcProxy(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -44239,12 +50478,18 @@ class TargetGrpcProxy(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the TargetGrpcProxy. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource type. The server generates this identifier. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#targetGrpcProxy for target grpc proxies. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -44254,15 +50499,23 @@ class TargetGrpcProxy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. self_link_with_id (str): [Output Only] Server-defined URL with id for the resource. 
+ + This field is a member of `oneof`_ ``_self_link_with_id``. url_map (str): URL to the UrlMap resource that defines the mapping from URL to the BackendService. The protocol field in the BackendService must be set to GRPC. + + This field is a member of `oneof`_ ``_url_map``. validate_for_proxyless (bool): If true, indicates that the BackendServices referenced by the urlMap may be accessed by gRPC @@ -44277,6 +50530,8 @@ class TargetGrpcProxy(proto.Message): a sidecar proxy. In this case, a gRPC application must not use "xds:///" scheme in the target URI of the service it is connecting to + + This field is a member of `oneof`_ ``_validate_for_proxyless``. """ creation_timestamp = proto.Field(proto.STRING, number=30525366, optional=True,) @@ -44293,15 +50548,20 @@ class TargetGrpcProxy(proto.Message): class TargetGrpcProxyList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetGrpcProxy]): A list of TargetGrpcProxy resources. kind (str): [Output Only] Type of the resource. Always compute#targetGrpcProxy for target grpc proxies. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -44309,10 +50569,16 @@ class TargetGrpcProxyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -44333,6 +50599,7 @@ def raw_page(self): class TargetHttpProxiesScopedList(proto.Message): r""" + Attributes: target_http_proxies (Sequence[google.cloud.compute_v1.types.TargetHttpProxy]): A list of TargetHttpProxies contained in this @@ -44340,6 +50607,8 @@ class TargetHttpProxiesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ target_http_proxies = proto.RepeatedField( @@ -44365,10 +50634,14 @@ class TargetHttpProxy(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -44380,12 +50653,18 @@ class TargetHttpProxy(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the TargetHttpProxy. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#targetHttpProxy for target HTTP proxies. + + This field is a member of `oneof`_ ``_kind``. 
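The ``TargetGrpcProxy`` fields above boil down to a URL map reference, whose ``BackendService`` must use the GRPC protocol, plus the ``validate_for_proxyless`` flag for proxyless gRPC clients. A hypothetical sketch with placeholder names:

```python
from google.cloud.compute_v1.types import compute

# Placeholder names; the referenced URL map's BackendService must use GRPC.
grpc_proxy = compute.TargetGrpcProxy(
    name="example-grpc-proxy",
    url_map="global/urlMaps/example-grpc-url-map",
    validate_for_proxyless=True,
)
```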
name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -44395,6 +50674,8 @@ class TargetHttpProxy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. proxy_bind (bool): This field only applies when the forwarding rule that references this target proxy has a loadBalancingScheme set @@ -44406,15 +50687,23 @@ class TargetHttpProxy(proto.Message): words, not a sidecar proxy). The Envoy proxy listens for inbound requests and handles requests when it receives them. The default is false. + + This field is a member of `oneof`_ ``_proxy_bind``. region (str): [Output Only] URL of the region where the regional Target HTTP Proxy resides. This field is not applicable to global Target HTTP Proxies. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. url_map (str): URL to the UrlMap resource that defines the mapping from URL to the BackendService. + + This field is a member of `oneof`_ ``_url_map``. """ creation_timestamp = proto.Field(proto.STRING, number=30525366, optional=True,) @@ -44431,10 +50720,13 @@ class TargetHttpProxy(proto.Message): class TargetHttpProxyAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetHttpProxyAggregatedList.ItemsEntry]): A list of TargetHttpProxiesScopedList resources. @@ -44442,6 +50734,8 @@ class TargetHttpProxyAggregatedList(proto.Message): [Output Only] Type of resource. Always compute#targetHttpProxyAggregatedList for lists of Target HTTP Proxies. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -44449,8 +50743,12 @@ class TargetHttpProxyAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. """ @@ -44474,16 +50772,21 @@ def raw_page(self): class TargetHttpProxyList(proto.Message): r"""A list of TargetHttpProxy resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetHttpProxy]): A list of TargetHttpProxy resources. kind (str): Type of resource. Always compute#targetHttpProxyList for lists of target HTTP proxies. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -44491,10 +50794,16 @@ class TargetHttpProxyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -44515,6 +50824,7 @@ def raw_page(self): class TargetHttpsProxiesScopedList(proto.Message): r""" + Attributes: target_https_proxies (Sequence[google.cloud.compute_v1.types.TargetHttpsProxy]): A list of TargetHttpsProxies contained in @@ -44522,6 +50832,8 @@ class TargetHttpsProxiesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ target_https_proxies = proto.RepeatedField( @@ -44534,10 +50846,13 @@ class TargetHttpsProxiesScopedList(proto.Message): class TargetHttpsProxiesSetQuicOverrideRequest(proto.Message): r""" + Attributes: quic_override (google.cloud.compute_v1.types.TargetHttpsProxiesSetQuicOverrideRequest.QuicOverride): QUIC policy for the TargetHttpsProxy resource. + + This field is a member of `oneof`_ ``_quic_override``. """ class QuicOverride(proto.Enum): @@ -44554,6 +50869,7 @@ class QuicOverride(proto.Enum): class TargetHttpsProxiesSetSslCertificatesRequest(proto.Message): r""" + Attributes: ssl_certificates (Sequence[str]): New set of SslCertificate resources to @@ -44589,12 +50905,18 @@ class TargetHttpsProxy(proto.Message): global TargetHttpsProxy attached to globalForwardingRules with the loadBalancingScheme set to INTERNAL_SELF_MANAGED. Note: This field currently has no impact. + + This field is a member of `oneof`_ ``_authorization_policy``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -44606,12 +50928,18 @@ class TargetHttpsProxy(proto.Message): conditionNotMet. To see the latest fingerprint, make a get() request to retrieve the TargetHttpsProxy. + + This field is a member of `oneof`_ ``_fingerprint``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#targetHttpsProxy for target HTTPS proxies. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -44621,6 +50949,8 @@ class TargetHttpsProxy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. proxy_bind (bool): This field only applies when the forwarding rule that references this target proxy has a loadBalancingScheme set @@ -44632,6 +50962,8 @@ class TargetHttpsProxy(proto.Message): words, not a sidecar proxy). The Envoy proxy listens for inbound requests and handles requests when it receives them. The default is false. + + This field is a member of `oneof`_ ``_proxy_bind``. 
quic_override (google.cloud.compute_v1.types.TargetHttpsProxy.QuicOverride): Specifies the QUIC override policy for this TargetHttpsProxy resource. This setting @@ -44644,12 +50976,18 @@ class TargetHttpsProxy(proto.Message): quic-override is set to DISABLE, the load balancer doesn't use QUIC. - If the quic- override flag is not specified, NONE is implied. + + This field is a member of `oneof`_ ``_quic_override``. region (str): [Output Only] URL of the region where the regional TargetHttpsProxy resides. This field is not applicable to global TargetHttpsProxies. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. server_tls_policy (str): Optional. A URL referring to a networksecurity.ServerTlsPolicy resource that describes how @@ -44659,6 +50997,8 @@ class TargetHttpsProxy(proto.Message): loadBalancingScheme set to INTERNAL_SELF_MANAGED. If left blank, communications are not encrypted. Note: This field currently has no impact. + + This field is a member of `oneof`_ ``_server_tls_policy``. ssl_certificates (Sequence[str]): URLs to SslCertificate resources that are used to authenticate connections between users and the load @@ -44671,6 +51011,8 @@ class TargetHttpsProxy(proto.Message): associated with the TargetHttpsProxy resource. If not set, the TargetHttpsProxy resource has no SSL policy configured. + + This field is a member of `oneof`_ ``_ssl_policy``. url_map (str): A fully-qualified or valid partial URL to the UrlMap resource that defines the mapping from @@ -44680,6 +51022,8 @@ class TargetHttpsProxy(proto.Message): https://www.googleapis.compute/v1/projects/project/global/urlMaps/ url-map - projects/project/global/urlMaps/url- map - global/urlMaps/url-map + + This field is a member of `oneof`_ ``_url_map``. """ class QuicOverride(proto.Enum): @@ -44718,10 +51062,13 @@ class QuicOverride(proto.Enum): class TargetHttpsProxyAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetHttpsProxyAggregatedList.ItemsEntry]): A list of TargetHttpsProxiesScopedList resources. @@ -44729,6 +51076,8 @@ class TargetHttpsProxyAggregatedList(proto.Message): [Output Only] Type of resource. Always compute#targetHttpsProxyAggregatedList for lists of Target HTTP Proxies. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -44736,12 +51085,18 @@ class TargetHttpsProxyAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -44766,16 +51121,21 @@ def raw_page(self): class TargetHttpsProxyList(proto.Message): r"""Contains a list of TargetHttpsProxy resources. 
+ Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetHttpsProxy]): A list of TargetHttpsProxy resources. kind (str): Type of resource. Always compute#targetHttpsProxyList for lists of target HTTPS proxies. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -44783,10 +51143,16 @@ class TargetHttpsProxyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -44815,13 +51181,19 @@ class TargetInstance(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. instance (str): A URL to the virtual machine instance that handles traffic for this target instance. When @@ -44833,9 +51205,13 @@ class TargetInstance(proto.Message): /instances/instance - projects/project/zones/zone/instances/instance - zones/zone/instances/instance + + This field is a member of `oneof`_ ``_instance``. kind (str): [Output Only] The type of the resource. Always compute#targetInstance for target instances. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -44845,21 +51221,31 @@ class TargetInstance(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. nat_policy (google.cloud.compute_v1.types.TargetInstance.NatPolicy): NAT option controlling how IPs are NAT'ed to the instance. Currently only NO_NAT (default value) is supported. + + This field is a member of `oneof`_ ``_nat_policy``. network (str): The URL of the network this target instance uses to forward traffic. If not specified, the traffic will be forwarded to the network that the default network interface belongs to. + + This field is a member of `oneof`_ ``_network``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. zone (str): [Output Only] URL of the zone where the target instance resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_zone``. 
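A ``TargetInstance``, as documented above, simply points at one VM (by full or partial URL) that handles the forwarded traffic, with ``NO_NAT`` currently the only supported NAT policy. A minimal sketch with placeholder resource names; the string form of the enum value relies on proto-plus accepting enum names as strings:

```python
from google.cloud.compute_v1.types import compute

# Placeholder names; NO_NAT is the only NAT policy the docstring lists as supported.
target_instance = compute.TargetInstance(
    name="example-target-instance",
    instance="zones/us-central1-a/instances/example-vm",
    nat_policy="NO_NAT",
)
```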
""" class NatPolicy(proto.Enum): @@ -44885,14 +51271,19 @@ class NatPolicy(proto.Enum): class TargetInstanceAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetInstanceAggregatedList.ItemsEntry]): A list of TargetInstance resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -44900,12 +51291,18 @@ class TargetInstanceAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -44930,14 +51327,19 @@ def raw_page(self): class TargetInstanceList(proto.Message): r"""Contains a list of TargetInstance resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetInstance]): A list of TargetInstance resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -44945,10 +51347,16 @@ class TargetInstanceList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -44969,6 +51377,7 @@ def raw_page(self): class TargetInstancesScopedList(proto.Message): r""" + Attributes: target_instances (Sequence[google.cloud.compute_v1.types.TargetInstance]): A list of target instances contained in this @@ -44976,6 +51385,8 @@ class TargetInstancesScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ target_instances = proto.RepeatedField( @@ -45009,12 +51420,18 @@ class TargetPool(proto.Message): pool in the "force" mode, where traffic will be spread to the healthy instances with the best effort, or to all instances when no instance is healthy. + + This field is a member of `oneof`_ ``_backup_pool``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. 
+ + This field is a member of `oneof`_ ``_description``. failover_ratio (float): This field is applicable only when the containing target pool is serving a forwarding rule as the primary pool (i.e., @@ -45030,6 +51447,8 @@ class TargetPool(proto.Message): mode, where traffic will be spread to the healthy instances with the best effort, or to all instances when no instance is healthy. + + This field is a member of `oneof`_ ``_failover_ratio``. health_checks (Sequence[str]): The URL of the HttpHealthCheck resource. A member instance in this pool is considered @@ -45039,6 +51458,8 @@ class TargetPool(proto.Message): id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. instances (Sequence[str]): A list of resource URLs to the virtual machine instances serving this pool. They must @@ -45047,6 +51468,8 @@ class TargetPool(proto.Message): kind (str): [Output Only] Type of the resource. Always compute#targetPool for target pools. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -45056,11 +51479,17 @@ class TargetPool(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. region (str): [Output Only] URL of the region where the target pool resides. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. session_affinity (google.cloud.compute_v1.types.TargetPool.SessionAffinity): Session affinity option, must be one of the following values: NONE: Connections from the same client IP may go to @@ -45070,6 +51499,8 @@ class TargetPool(proto.Message): Connections from the same client IP with the same IP protocol will go to the same instance in the pool while that instance remains healthy. + + This field is a member of `oneof`_ ``_session_affinity``. """ class SessionAffinity(proto.Enum): @@ -45109,16 +51540,21 @@ class SessionAffinity(proto.Enum): class TargetPoolAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetPoolAggregatedList.ItemsEntry]): A list of TargetPool resources. kind (str): [Output Only] Type of resource. Always compute#targetPoolAggregatedList for aggregated lists of target pools. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -45126,12 +51562,18 @@ class TargetPoolAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
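The ``TargetPool`` fields above cover target-pool-based network load balancing: a list of instance URLs, an HttpHealthCheck URL, a session affinity mode, and optionally a ``backup_pool`` that takes over once the healthy fraction of the primary drops below ``failover_ratio`` (the two are set together). A hypothetical sketch with placeholder partial URLs:

```python
from google.cloud.compute_v1.types import compute

# Placeholder partial URLs; backup_pool and failover_ratio are set together,
# as the docstrings above describe.
target_pool = compute.TargetPool(
    name="example-primary-pool",
    health_checks=["global/httpHealthChecks/example-check"],
    instances=[
        "zones/us-central1-a/instances/web-1",
        "zones/us-central1-b/instances/web-2",
    ],
    session_affinity="CLIENT_IP",
    failover_ratio=0.1,
    backup_pool="regions/us-central1/targetPools/example-backup-pool",
)
```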
""" @property @@ -45153,6 +51595,7 @@ def raw_page(self): class TargetPoolInstanceHealth(proto.Message): r""" + Attributes: health_status (Sequence[google.cloud.compute_v1.types.HealthStatus]): @@ -45160,6 +51603,8 @@ class TargetPoolInstanceHealth(proto.Message): [Output Only] Type of resource. Always compute#targetPoolInstanceHealth when checking the health of an instance. + + This field is a member of `oneof`_ ``_kind``. """ health_status = proto.RepeatedField( @@ -45170,15 +51615,20 @@ class TargetPoolInstanceHealth(proto.Message): class TargetPoolList(proto.Message): r"""Contains a list of TargetPool resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetPool]): A list of TargetPool resources. kind (str): [Output Only] Type of resource. Always compute#targetPoolList for lists of target pools. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -45186,10 +51636,16 @@ class TargetPoolList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -45208,6 +51664,7 @@ def raw_page(self): class TargetPoolsAddHealthCheckRequest(proto.Message): r""" + Attributes: health_checks (Sequence[google.cloud.compute_v1.types.HealthCheckReference]): The HttpHealthCheck to add to the target @@ -45221,6 +51678,7 @@ class TargetPoolsAddHealthCheckRequest(proto.Message): class TargetPoolsAddInstanceRequest(proto.Message): r""" + Attributes: instances (Sequence[google.cloud.compute_v1.types.InstanceReference]): A full or partial URL to an instance to add @@ -45241,6 +51699,7 @@ class TargetPoolsAddInstanceRequest(proto.Message): class TargetPoolsRemoveHealthCheckRequest(proto.Message): r""" + Attributes: health_checks (Sequence[google.cloud.compute_v1.types.HealthCheckReference]): Health check URL to be removed. This can be a @@ -45259,6 +51718,7 @@ class TargetPoolsRemoveHealthCheckRequest(proto.Message): class TargetPoolsRemoveInstanceRequest(proto.Message): r""" + Attributes: instances (Sequence[google.cloud.compute_v1.types.InstanceReference]): URLs of the instances to be removed from @@ -45272,6 +51732,7 @@ class TargetPoolsRemoveInstanceRequest(proto.Message): class TargetPoolsScopedList(proto.Message): r""" + Attributes: target_pools (Sequence[google.cloud.compute_v1.types.TargetPool]): A list of target pools contained in this @@ -45279,6 +51740,8 @@ class TargetPoolsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ target_pools = proto.RepeatedField( @@ -45291,9 +51754,11 @@ class TargetPoolsScopedList(proto.Message): class TargetReference(proto.Message): r""" + Attributes: target (str): + This field is a member of `oneof`_ ``_target``. 
""" target = proto.Field(proto.STRING, number=192835985, optional=True,) @@ -45301,10 +51766,13 @@ class TargetReference(proto.Message): class TargetSslProxiesSetBackendServiceRequest(proto.Message): r""" + Attributes: service (str): The URL of the new BackendService resource for the targetSslProxy. + + This field is a member of `oneof`_ ``_service``. """ service = proto.Field(proto.STRING, number=373540533, optional=True,) @@ -45312,10 +51780,13 @@ class TargetSslProxiesSetBackendServiceRequest(proto.Message): class TargetSslProxiesSetProxyHeaderRequest(proto.Message): r""" + Attributes: proxy_header (google.cloud.compute_v1.types.TargetSslProxiesSetProxyHeaderRequest.ProxyHeader): The new type of proxy header to append before sending data to the backend. NONE or PROXY_V1 are allowed. + + This field is a member of `oneof`_ ``_proxy_header``. """ class ProxyHeader(proto.Enum): @@ -45333,6 +51804,7 @@ class ProxyHeader(proto.Enum): class TargetSslProxiesSetSslCertificatesRequest(proto.Message): r""" + Attributes: ssl_certificates (Sequence[str]): New set of URLs to SslCertificate resources @@ -45355,16 +51827,24 @@ class TargetSslProxy(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#targetSslProxy for target SSL proxies. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -45374,14 +51854,22 @@ class TargetSslProxy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. proxy_header (google.cloud.compute_v1.types.TargetSslProxy.ProxyHeader): Specifies the type of proxy header to append before sending data to the backend, either NONE or PROXY_V1. The default is NONE. + + This field is a member of `oneof`_ ``_proxy_header``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. service (str): URL to the BackendService resource. + + This field is a member of `oneof`_ ``_service``. ssl_certificates (Sequence[str]): URLs to SslCertificate resources that are used to authenticate connections to Backends. At least one SSL @@ -45393,6 +51881,8 @@ class TargetSslProxy(proto.Message): associated with the TargetSslProxy resource. If not set, the TargetSslProxy resource will not have any SSL policy configured. + + This field is a member of `oneof`_ ``_ssl_policy``. """ class ProxyHeader(proto.Enum): @@ -45419,14 +51909,19 @@ class ProxyHeader(proto.Enum): class TargetSslProxyList(proto.Message): r"""Contains a list of TargetSslProxy resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetSslProxy]): A list of TargetSslProxy resources. kind (str): Type of resource. 
+ + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -45434,10 +51929,16 @@ class TargetSslProxyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -45458,10 +51959,13 @@ def raw_page(self): class TargetTcpProxiesSetBackendServiceRequest(proto.Message): r""" + Attributes: service (str): The URL of the new BackendService resource for the targetTcpProxy. + + This field is a member of `oneof`_ ``_service``. """ service = proto.Field(proto.STRING, number=373540533, optional=True,) @@ -45469,10 +51973,13 @@ class TargetTcpProxiesSetBackendServiceRequest(proto.Message): class TargetTcpProxiesSetProxyHeaderRequest(proto.Message): r""" + Attributes: proxy_header (google.cloud.compute_v1.types.TargetTcpProxiesSetProxyHeaderRequest.ProxyHeader): The new type of proxy header to append before sending data to the backend. NONE or PROXY_V1 are allowed. + + This field is a member of `oneof`_ ``_proxy_header``. """ class ProxyHeader(proto.Enum): @@ -45498,16 +52005,24 @@ class TargetTcpProxy(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#targetTcpProxy for target TCP proxies. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -45517,6 +52032,8 @@ class TargetTcpProxy(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. proxy_bind (bool): This field only applies when the forwarding rule that references this target proxy has a loadBalancingScheme set @@ -45528,14 +52045,22 @@ class TargetTcpProxy(proto.Message): words, not a sidecar proxy). The Envoy proxy listens for inbound requests and handles requests when it receives them. The default is false. + + This field is a member of `oneof`_ ``_proxy_bind``. proxy_header (google.cloud.compute_v1.types.TargetTcpProxy.ProxyHeader): Specifies the type of proxy header to append before sending data to the backend, either NONE or PROXY_V1. The default is NONE. + + This field is a member of `oneof`_ ``_proxy_header``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. service (str): URL to the BackendService resource. + + This field is a member of `oneof`_ ``_service``. 
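The ``oneof`` notes added throughout this file mean these fields carry explicit presence, so an unset field can be distinguished from its default value. A minimal sketch of what that looks like with proto-plus messages (the proxy name is hypothetical):

    from google.cloud import compute_v1

    proxy = compute_v1.TargetTcpProxy(name="example-tcp-proxy")

    # "name" was assigned, so its synthetic oneof (``_name``) is populated.
    print("name" in proxy)     # True
    # "service" was never assigned; it is reported as absent rather than "".
    print("service" in proxy)  # False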
""" class ProxyHeader(proto.Enum): @@ -45561,14 +52086,19 @@ class ProxyHeader(proto.Enum): class TargetTcpProxyList(proto.Message): r"""Contains a list of TargetTcpProxy resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetTcpProxy]): A list of TargetTcpProxy resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -45576,10 +52106,16 @@ class TargetTcpProxyList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -45606,10 +52142,14 @@ class TargetVpnGateway(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. forwarding_rules (Sequence[str]): [Output Only] A list of URLs to the ForwardingRule resources. ForwardingRules are created using @@ -45618,9 +52158,13 @@ class TargetVpnGateway(proto.Message): id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#targetVpnGateway for target VPN gateways. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -45630,20 +52174,30 @@ class TargetVpnGateway(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network (str): URL of the network to which this VPN gateway is attached. Provided by the client when the VPN gateway is created. + + This field is a member of `oneof`_ ``_network``. region (str): [Output Only] URL of the region where the target VPN gateway resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. status (google.cloud.compute_v1.types.TargetVpnGateway.Status): [Output Only] The status of the VPN gateway, which can be one of the following: CREATING, READY, FAILED, or DELETING. + + This field is a member of `oneof`_ ``_status``. tunnels (Sequence[str]): [Output Only] A list of URLs to VpnTunnel resources. 
VpnTunnels are created using the compute.vpntunnels.insert @@ -45675,15 +52229,20 @@ class Status(proto.Enum): class TargetVpnGatewayAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetVpnGatewayAggregatedList.ItemsEntry]): A list of TargetVpnGateway resources. kind (str): [Output Only] Type of resource. Always compute#targetVpnGateway for target VPN gateways. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -45691,12 +52250,18 @@ class TargetVpnGatewayAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -45721,15 +52286,20 @@ def raw_page(self): class TargetVpnGatewayList(proto.Message): r"""Contains a list of TargetVpnGateway resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.TargetVpnGateway]): A list of TargetVpnGateway resources. kind (str): [Output Only] Type of resource. Always compute#targetVpnGateway for target VPN gateways. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -45737,10 +52307,16 @@ class TargetVpnGatewayList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -45761,6 +52337,7 @@ def raw_page(self): class TargetVpnGatewaysScopedList(proto.Message): r""" + Attributes: target_vpn_gateways (Sequence[google.cloud.compute_v1.types.TargetVpnGateway]): [Output Only] A list of target VPN gateways contained in @@ -45768,6 +52345,8 @@ class TargetVpnGatewaysScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ target_vpn_gateways = proto.RepeatedField( @@ -45780,34 +52359,51 @@ class TargetVpnGatewaysScopedList(proto.Message): class TestFailure(proto.Message): r""" + Attributes: actual_output_url (str): The actual output URL evaluated by load balancer containing the scheme, host, path and query parameters. + + This field is a member of `oneof`_ ``_actual_output_url``. 
actual_redirect_response_code (int): Actual HTTP status code for rule with ``urlRedirect`` calculated by load balancer + + This field is a member of `oneof`_ ``_actual_redirect_response_code``. actual_service (str): BackendService or BackendBucket returned by load balancer. + + This field is a member of `oneof`_ ``_actual_service``. expected_output_url (str): The expected output URL evaluated by load balancer containing the scheme, host, path and query parameters. + + This field is a member of `oneof`_ ``_expected_output_url``. expected_redirect_response_code (int): Expected HTTP status code for rule with ``urlRedirect`` calculated by load balancer + + This field is a member of `oneof`_ ``_expected_redirect_response_code``. expected_service (str): Expected BackendService or BackendBucket resource the given URL should be mapped to. + + This field is a member of `oneof`_ ``_expected_service``. headers (Sequence[google.cloud.compute_v1.types.UrlMapTestHeader]): HTTP headers of the request. host (str): Host portion of the URL. + + This field is a member of `oneof`_ ``_host``. path (str): Path portion including query parameters in the URL. + + This field is a member of `oneof`_ ``_path``. """ actual_output_url = proto.Field(proto.STRING, number=287075458, optional=True,) @@ -46243,6 +52839,7 @@ class TestIamPermissionsVpnGatewayRequest(proto.Message): class TestPermissionsRequest(proto.Message): r""" + Attributes: permissions (Sequence[str]): The set of permissions to check for the 'resource'. @@ -46255,6 +52852,7 @@ class TestPermissionsRequest(proto.Message): class TestPermissionsResponse(proto.Message): r""" + Attributes: permissions (Sequence[str]): A subset of ``TestPermissionsRequest.permissions`` that the @@ -46266,11 +52864,14 @@ class TestPermissionsResponse(proto.Message): class Uint128(proto.Message): r""" + Attributes: high (int): + This field is a member of `oneof`_ ``_high``. low (int): + This field is a member of `oneof`_ ``_low``. """ high = proto.Field(proto.UINT64, number=3202466, optional=True,) @@ -46307,6 +52908,8 @@ class UpdateAccessConfigInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -46328,6 +52931,8 @@ class UpdateAutoscalerRequest(proto.Message): Attributes: autoscaler (str): Name of the autoscaler to update. + + This field is a member of `oneof`_ ``_autoscaler``. autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): The body resource for this request project (str): @@ -46348,6 +52953,8 @@ class UpdateAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): Name of the zone for this request. """ @@ -46388,6 +52995,8 @@ class UpdateBackendBucketRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_bucket = proto.Field(proto.STRING, number=91714037,) @@ -46426,6 +53035,8 @@ class UpdateBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
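As a hedged illustration of how the ``request_id`` described above is typically supplied (resource names are hypothetical, and the field names assume the generator's usual ``*_resource`` convention):

    import uuid

    from google.cloud import compute_v1

    request = compute_v1.UpdateBackendServiceRequest(
        project="example-project",
        backend_service="example-backend-service",
        backend_service_resource=compute_v1.BackendService(
            name="example-backend-service",
        ),
        # Idempotency token; the zero UUID is not accepted by the API.
        request_id=str(uuid.uuid4()),
    )
    operation = compute_v1.BackendServicesClient().update(request=request)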
""" backend_service = proto.Field(proto.STRING, number=306946058,) @@ -46463,6 +53074,8 @@ class UpdateDisplayDeviceInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -46503,6 +53116,8 @@ class UpdateFirewallRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ firewall = proto.Field(proto.STRING, number=511016192,) @@ -46538,6 +53153,8 @@ class UpdateHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check = proto.Field(proto.STRING, number=308876645,) @@ -46563,6 +53180,8 @@ class UpdateInstanceRequest(proto.Message): require it. If not specified, then Compute Engine acts based on the minimum action that the updated properties require. + + This field is a member of `oneof`_ ``_minimal_action``. most_disruptive_allowed_action (str): Specifies the most disruptive action that can be taken on the instance as part of the update. Compute Engine returns @@ -46570,6 +53189,8 @@ class UpdateInstanceRequest(proto.Message): disruptive action as part of the instance update. Valid options from lowest to highest are NO_EFFECT, REFRESH, and RESTART. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. project (str): Project ID for this request. request_id (str): @@ -46588,6 +53209,8 @@ class UpdateInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -46634,6 +53257,8 @@ class UpdateNetworkInterfaceInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone for this request. """ @@ -46676,6 +53301,8 @@ class UpdatePeeringNetworkRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ network = proto.Field(proto.STRING, number=232872494,) @@ -46715,6 +53342,8 @@ class UpdatePerInstanceConfigsInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. zone (str): The name of the zone where the managed instance group is located. It should conform to @@ -46764,6 +53393,8 @@ class UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ instance_group_manager = proto.Field(proto.STRING, number=249363395,) @@ -46784,6 +53415,8 @@ class UpdateRegionAutoscalerRequest(proto.Message): Attributes: autoscaler (str): Name of the autoscaler to update. + + This field is a member of `oneof`_ ``_autoscaler``. 
autoscaler_resource (google.cloud.compute_v1.types.Autoscaler): The body resource for this request project (str): @@ -46806,6 +53439,8 @@ class UpdateRegionAutoscalerRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ autoscaler = proto.Field(proto.STRING, number=517258967, optional=True,) @@ -46847,6 +53482,8 @@ class UpdateRegionBackendServiceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ backend_service = proto.Field(proto.STRING, number=306946058,) @@ -46887,6 +53524,8 @@ class UpdateRegionHealthCheckRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ health_check = proto.Field(proto.STRING, number=308876645,) @@ -46910,6 +53549,8 @@ class UpdateRegionUrlMapRequest(proto.Message): request_id (str): begin_interface: MixerMutationRequestBuilder Request ID to support idempotency. + + This field is a member of `oneof`_ ``_request_id``. url_map (str): Name of the UrlMap resource to update. url_map_resource (google.cloud.compute_v1.types.UrlMap): @@ -46948,6 +53589,8 @@ class UpdateRouterRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. router (str): Name of the Router resource to update. router_resource (google.cloud.compute_v1.types.Router): @@ -46987,6 +53630,8 @@ class UpdateShieldedInstanceConfigInstanceRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. shielded_instance_config_resource (google.cloud.compute_v1.types.ShieldedInstanceConfig): The body resource for this request zone (str): @@ -47025,6 +53670,8 @@ class UpdateUrlMapRequest(proto.Message): must be a valid UUID with the exception that zero UUID is not supported ( 00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. url_map (str): Name of the UrlMap resource to update. url_map_resource (google.cloud.compute_v1.types.UrlMap): @@ -47059,6 +53706,8 @@ class UrlMap(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. default_route_action (google.cloud.compute_v1.types.HttpRouteAction): defaultRouteAction takes effect when none of the hostRules match. The load balancer performs @@ -47077,6 +53726,8 @@ class UrlMap(proto.Message): effect when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_default_route_action``. default_service (str): The full or partial URL of the defaultService resource to which traffic is directed if none of @@ -47094,6 +53745,8 @@ class UrlMap(proto.Message): be set. defaultService has no effect when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_default_service``. 
default_url_redirect (google.cloud.compute_v1.types.HttpRedirectAction): When none of the specified hostRules match, the request is redirected to a URL specified by @@ -47101,10 +53754,14 @@ class UrlMap(proto.Message): specified, defaultService or defaultRouteAction must not be set. Not supported when the URL map is bound to target gRPC proxy. + + This field is a member of `oneof`_ ``_default_url_redirect``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. fingerprint (str): Fingerprint of this resource. A hash of the contents stored in this object. This field is @@ -47115,6 +53772,8 @@ class UrlMap(proto.Message): error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve a UrlMap. + + This field is a member of `oneof`_ ``_fingerprint``. header_action (google.cloud.compute_v1.types.HttpHeaderAction): Specifies changes to request and response headers that need to take effect for the @@ -47126,14 +53785,20 @@ class UrlMap(proto.Message): EXTERNAL. Not supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_header_action``. host_rules (Sequence[google.cloud.compute_v1.types.HostRule]): The list of HostRules to use against the URL. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. Always compute#urlMaps for url maps. + + This field is a member of `oneof`_ ``_kind``. name (str): Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, @@ -47143,6 +53808,8 @@ class UrlMap(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. path_matchers (Sequence[google.cloud.compute_v1.types.PathMatcher]): The list of named PathMatchers to use against the URL. @@ -47151,8 +53818,12 @@ class UrlMap(proto.Message): resides. This field is not applicable to global URL maps. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. tests (Sequence[google.cloud.compute_v1.types.UrlMapTest]): The list of expected URL mapping tests. Request to update this UrlMap will succeed only @@ -47191,14 +53862,19 @@ class UrlMap(proto.Message): class UrlMapList(proto.Message): r"""Contains a list of UrlMap resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.UrlMap]): A list of UrlMap resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -47206,10 +53882,16 @@ class UrlMapList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -47228,9 +53910,11 @@ def raw_page(self): class UrlMapReference(proto.Message): r""" + Attributes: url_map (str): + This field is a member of `oneof`_ ``_url_map``. """ url_map = proto.Field(proto.STRING, number=367020684, optional=True,) @@ -47238,9 +53922,12 @@ class UrlMapReference(proto.Message): class UrlMapTest(proto.Message): r"""Message for the expected URL mappings. + Attributes: description (str): Description of this test case. + + This field is a member of `oneof`_ ``_description``. expected_output_url (str): The expected output URL evaluated by load balancer containing the scheme, host, path and query parameters. For @@ -47258,12 +53945,16 @@ class UrlMapTest(proto.Message): test passes only if expectedOutputUrl does not contain any query parameters. expectedOutputUrl is optional when service is specified. + + This field is a member of `oneof`_ ``_expected_output_url``. expected_redirect_response_code (int): For rules with urlRedirect, the test passes only if expectedRedirectResponseCode matches the HTTP status code in load balancer's redirect response. expectedRedirectResponseCode cannot be set when service is set. + + This field is a member of `oneof`_ ``_expected_redirect_response_code``. headers (Sequence[google.cloud.compute_v1.types.UrlMapTestHeader]): HTTP headers for this request. If headers contains a host header, then host must also @@ -47272,13 +53963,19 @@ class UrlMapTest(proto.Message): Host portion of the URL. If headers contains a host header, then host must also match the header value. + + This field is a member of `oneof`_ ``_host``. path (str): Path portion of the URL. + + This field is a member of `oneof`_ ``_path``. service (str): Expected BackendService or BackendBucket resource the given URL should be mapped to. service cannot be set if expectedRedirectResponseCode is set. + + This field is a member of `oneof`_ ``_service``. """ description = proto.Field(proto.STRING, number=422937596, optional=True,) @@ -47296,11 +53993,16 @@ class UrlMapTest(proto.Message): class UrlMapTestHeader(proto.Message): r"""HTTP headers used in UrlMapTests. + Attributes: name (str): Header name. + + This field is a member of `oneof`_ ``_name``. value (str): Header value. + + This field is a member of `oneof`_ ``_value``. """ name = proto.Field(proto.STRING, number=3373707, optional=True,) @@ -47309,6 +54011,7 @@ class UrlMapTestHeader(proto.Message): class UrlMapValidationResult(proto.Message): r"""Message representing the validation result for a UrlMap. + Attributes: load_errors (Sequence[str]): @@ -47316,12 +54019,16 @@ class UrlMapValidationResult(proto.Message): Whether the given UrlMap can be successfully loaded. If false, 'loadErrors' indicates the reasons. + + This field is a member of `oneof`_ ``_load_succeeded``. test_failures (Sequence[google.cloud.compute_v1.types.TestFailure]): test_passed (bool): If successfully loaded, this field indicates whether the test passed. If false, 'testFailures's indicate the reason of failure. + + This field is a member of `oneof`_ ``_test_passed``. 
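To make the expected-mapping machinery above concrete, here is a small sketch that attaches a test case to a URL map; the host, path, and backend service URL are hypothetical:

    from google.cloud import compute_v1

    test = compute_v1.UrlMapTest(
        description="root path should be served by the web backend",
        host="example.com",
        path="/",
        service="global/backendServices/example-web-backend",
    )
    url_map = compute_v1.UrlMap(name="example-url-map", tests=[test])

    # Running the UrlMaps Validate call against this map yields a
    # UrlMapValidationResult; load_succeeded and test_passed report whether
    # the map loaded and whether the expected mappings held.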
""" load_errors = proto.RepeatedField(proto.STRING, number=310147300,) @@ -47334,14 +54041,19 @@ class UrlMapValidationResult(proto.Message): class UrlMapsAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.UrlMapsAggregatedList.ItemsEntry]): A list of UrlMapsScopedList resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -47349,12 +54061,18 @@ class UrlMapsAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -47376,12 +54094,15 @@ def raw_page(self): class UrlMapsScopedList(proto.Message): r""" + Attributes: url_maps (Sequence[google.cloud.compute_v1.types.UrlMap]): A list of UrlMaps contained in this scope. warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ url_maps = proto.RepeatedField(proto.MESSAGE, number=103352167, message="UrlMap",) @@ -47392,9 +54113,12 @@ class UrlMapsScopedList(proto.Message): class UrlMapsValidateRequest(proto.Message): r""" + Attributes: resource (google.cloud.compute_v1.types.UrlMap): Content of the UrlMap to be validated. + + This field is a member of `oneof`_ ``_resource``. """ resource = proto.Field( @@ -47404,9 +54128,11 @@ class UrlMapsValidateRequest(proto.Message): class UrlMapsValidateResponse(proto.Message): r""" + Attributes: result (google.cloud.compute_v1.types.UrlMapValidationResult): + This field is a member of `oneof`_ ``_result``. """ result = proto.Field( @@ -47427,12 +54153,16 @@ class UrlRewrite(proto.Message): selected service, the request's host header is replaced with contents of hostRewrite. The value must be between 1 and 255 characters. + + This field is a member of `oneof`_ ``_host_rewrite``. path_prefix_rewrite (str): Prior to forwarding the request to the selected backend service, the matching portion of the request's path is replaced by pathPrefixRewrite. The value must be between 1 and 1024 characters. + + This field is a member of `oneof`_ ``_path_prefix_rewrite``. """ host_rewrite = proto.Field(proto.STRING, number=159819253, optional=True,) @@ -47447,12 +54177,18 @@ class UsableSubnetwork(proto.Message): ip_cidr_range (str): The range of internal addresses that are owned by this subnetwork. + + This field is a member of `oneof`_ ``_ip_cidr_range``. network (str): Network URL. + + This field is a member of `oneof`_ ``_network``. secondary_ip_ranges (Sequence[google.cloud.compute_v1.types.UsableSubnetworkSecondaryRange]): Secondary IP ranges. subnetwork (str): Subnetwork URL. + + This field is a member of `oneof`_ ``_subnetwork``. 
""" ip_cidr_range = proto.Field(proto.STRING, number=98117322, optional=True,) @@ -47465,16 +54201,21 @@ class UsableSubnetwork(proto.Message): class UsableSubnetworkSecondaryRange(proto.Message): r"""Secondary IP range of a usable subnetwork. + Attributes: ip_cidr_range (str): The range of IP addresses belonging to this subnetwork secondary range. + + This field is a member of `oneof`_ ``_ip_cidr_range``. range_name (str): The name associated with this subnetwork secondary range, used when adding an alias IP range to a VM instance. The name must be 1-63 characters long, and comply with RFC1035. The name must be unique within the subnetwork. + + This field is a member of `oneof`_ ``_range_name``. """ ip_cidr_range = proto.Field(proto.STRING, number=98117322, optional=True,) @@ -47483,16 +54224,21 @@ class UsableSubnetworkSecondaryRange(proto.Message): class UsableSubnetworksAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.UsableSubnetwork]): [Output] A list of usable subnetwork URLs. kind (str): [Output Only] Type of resource. Always compute#usableSubnetworksAggregatedList for aggregated lists of usable subnetworks. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -47502,10 +54248,16 @@ class UsableSubnetworksAggregatedList(proto.Message): to continue paging through the results. In special cases listUsable may return 0 subnetworks and nextPageToken which still should be used to get the next page of results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -47538,6 +54290,8 @@ class UsageExportLocation(proto.Message): or the bucket name with gs:// or https://storage.googleapis.com/ in front of it, such as gs://example-bucket. + + This field is a member of `oneof`_ ``_bucket_name``. report_name_prefix (str): An optional prefix for the name of the usage report object stored in bucketName. If not supplied, defaults to @@ -47546,6 +54300,8 @@ class UsageExportLocation(proto.Message): day of the usage according to Pacific Time. If you supply a prefix, it should conform to Cloud Storage object naming conventions. + + This field is a member of `oneof`_ ``_report_name_prefix``. """ bucket_name = proto.Field(proto.STRING, number=283610048, optional=True,) @@ -47605,6 +54361,8 @@ class VmEndpointNatMappings(proto.Message): instance_name (str): Name of the VM instance which the endpoint belongs to + + This field is a member of `oneof`_ ``_instance_name``. interface_nat_mappings (Sequence[google.cloud.compute_v1.types.VmEndpointNatMappingsInterfaceNatMappings]): """ @@ -47636,16 +54394,24 @@ class VmEndpointNatMappingsInterfaceNatMappings(proto.Message): Total number of drain ports across all NAT IPs allocated to this interface. It equals to the aggregated port number in the field drain_nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_drain_nat_ports``. num_total_nat_ports (int): Total number of ports across all NAT IPs allocated to this interface. 
It equals to the aggregated port number in the field nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_nat_ports``. source_alias_ip_range (str): Alias IP range for this interface endpoint. It will be a private (RFC 1918) IP range. Examples: "10.33.4.55/32", or "192.168.5.0/24". + + This field is a member of `oneof`_ ``_source_alias_ip_range``. source_virtual_ip (str): Primary IP of the VM for this NIC. + + This field is a member of `oneof`_ ``_source_virtual_ip``. """ drain_nat_ip_port_ranges = proto.RepeatedField(proto.STRING, number=395440577,) @@ -47660,14 +54426,19 @@ class VmEndpointNatMappingsInterfaceNatMappings(proto.Message): class VmEndpointNatMappingsList(proto.Message): r"""Contains a list of VmEndpointNatMappings. + Attributes: id (str): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#vmEndpointNatMappingsList for lists of Nat mappings of VM endpoints. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -47675,13 +54446,19 @@ class VmEndpointNatMappingsList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. result (Sequence[google.cloud.compute_v1.types.VmEndpointNatMappings]): [Output Only] A list of Nat mapping information of VM endpoints. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -47711,16 +54488,24 @@ class VpnGateway(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of resource. Always compute#vpnGateway for VPN gateways. + + This field is a member of `oneof`_ ``_kind``. label_fingerprint (str): A fingerprint for the labels being applied to this VpnGateway, which is essentially a hash of @@ -47733,6 +54518,8 @@ class VpnGateway(proto.Message): with error 412 conditionNotMet. To see the latest fingerprint, make a get() request to retrieve an VpnGateway. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.VpnGateway.LabelsEntry]): Labels for this resource. These can only be added or modified by the setLabels method. Each @@ -47747,15 +54534,23 @@ class VpnGateway(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. network (str): URL of the network to which this VPN gateway is attached. Provided by the client when the VPN gateway is created. 
+ + This field is a member of `oneof`_ ``_network``. region (str): [Output Only] URL of the region where the VPN gateway resides. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. vpn_interfaces (Sequence[google.cloud.compute_v1.types.VpnGatewayVpnGatewayInterface]): The list of VPN interfaces associated with this VPN gateway. @@ -47778,15 +54573,20 @@ class VpnGateway(proto.Message): class VpnGatewayAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.VpnGatewayAggregatedList.ItemsEntry]): A list of VpnGateway resources. kind (str): [Output Only] Type of resource. Always compute#vpnGateway for VPN gateways. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -47794,12 +54594,18 @@ class VpnGatewayAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -47821,15 +54627,20 @@ def raw_page(self): class VpnGatewayList(proto.Message): r"""Contains a list of VpnGateway resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.VpnGateway]): A list of VpnGateway resources. kind (str): [Output Only] Type of resource. Always compute#vpnGateway for VPN gateways. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -47837,10 +54648,16 @@ class VpnGatewayList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -47859,6 +54676,7 @@ def raw_page(self): class VpnGatewayStatus(proto.Message): r""" + Attributes: vpn_connections (Sequence[google.cloud.compute_v1.types.VpnGatewayStatusVpnConnection]): List of VPN connection for this VpnGateway. @@ -47878,10 +54696,14 @@ class VpnGatewayStatusHighAvailabilityRequirementState(proto.Message): Indicates the high availability requirement state for the VPN connection. Valid values are CONNECTION_REDUNDANCY_MET, CONNECTION_REDUNDANCY_NOT_MET. + + This field is a member of `oneof`_ ``_state``. 
unsatisfied_reason (google.cloud.compute_v1.types.VpnGatewayStatusHighAvailabilityRequirementState.UnsatisfiedReason): Indicates the reason why the VPN connection does not meet the high availability redundancy criteria/requirement. Valid values is INCOMPLETE_TUNNELS_COVERAGE. + + This field is a member of `oneof`_ ``_unsatisfied_reason``. """ class State(proto.Enum): @@ -47909,16 +54731,23 @@ class UnsatisfiedReason(proto.Enum): class VpnGatewayStatusTunnel(proto.Message): r"""Contains some information about a VPN tunnel. + Attributes: local_gateway_interface (int): The VPN gateway interface this VPN tunnel is associated with. + + This field is a member of `oneof`_ ``_local_gateway_interface``. peer_gateway_interface (int): The peer gateway interface this VPN tunnel is connected to, the peer gateway could either be an external VPN gateway or GCP VPN gateway. + + This field is a member of `oneof`_ ``_peer_gateway_interface``. tunnel_url (str): URL reference to the VPN tunnel. + + This field is a member of `oneof`_ ``_tunnel_url``. """ local_gateway_interface = proto.Field( @@ -47938,13 +54767,19 @@ class VpnGatewayStatusVpnConnection(proto.Message): URL reference to the peer external VPN gateways to which the VPN tunnels in this VPN connection are connected. This field is mutually exclusive with peer_gcp_gateway. + + This field is a member of `oneof`_ ``_peer_external_gateway``. peer_gcp_gateway (str): URL reference to the peer side VPN gateways to which the VPN tunnels in this VPN connection are connected. This field is mutually exclusive with peer_gcp_gateway. + + This field is a member of `oneof`_ ``_peer_gcp_gateway``. state (google.cloud.compute_v1.types.VpnGatewayStatusHighAvailabilityRequirementState): HighAvailabilityRequirementState for the VPN connection. + + This field is a member of `oneof`_ ``_state``. tunnels (Sequence[google.cloud.compute_v1.types.VpnGatewayStatusTunnel]): List of VPN tunnels that are in this VPN connection. @@ -47965,10 +54800,13 @@ class VpnGatewayStatusVpnConnection(proto.Message): class VpnGatewayVpnGatewayInterface(proto.Message): r"""A VPN gateway interface. + Attributes: id (int): [Output Only] Numeric identifier for this VPN interface associated with the VPN gateway. + + This field is a member of `oneof`_ ``_id``. interconnect_attachment (str): URL of the VLAN attachment (interconnectAttachment) resource for this VPN @@ -47978,6 +54816,8 @@ class VpnGatewayVpnGatewayInterface(proto.Message): ingress traffic for this VPN gateway interface goes through the specified VLAN attachment resource. Not currently available publicly. + + This field is a member of `oneof`_ ``_interconnect_attachment``. ip_address (str): [Output Only] IP address for this VPN interface associated with the VPN gateway. The IP address could be either a @@ -47991,6 +54831,8 @@ class VpnGatewayVpnGatewayInterface(proto.Message): external IP addresses. For regular (non IPsec-encrypted Cloud Interconnect) HA VPN tunnels, the IP address must be a regional external IP address. + + This field is a member of `oneof`_ ``_ip_address``. """ id = proto.Field(proto.UINT32, number=3355, optional=True,) @@ -48002,9 +54844,11 @@ class VpnGatewayVpnGatewayInterface(proto.Message): class VpnGatewaysGetStatusResponse(proto.Message): r""" + Attributes: result (google.cloud.compute_v1.types.VpnGatewayStatus): + This field is a member of `oneof`_ ``_result``. 
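A hedged sketch of reading this response, assuming the generated VpnGatewaysClient exposes the GetStatus call as ``get_status`` (project, region, and gateway names are hypothetical):

    from google.cloud import compute_v1

    client = compute_v1.VpnGatewaysClient()
    response = client.get_status(
        project="example-project",
        region="us-central1",
        vpn_gateway="example-ha-vpn-gateway",
    )
    for connection in response.result.vpn_connections:
        # state is a VpnGatewayStatusHighAvailabilityRequirementState.
        print(connection.peer_gcp_gateway, connection.state.state)
        for tunnel in connection.tunnels:
            print("  tunnel:", tunnel.tunnel_url)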
""" result = proto.Field( @@ -48014,6 +54858,7 @@ class VpnGatewaysGetStatusResponse(proto.Message): class VpnGatewaysScopedList(proto.Message): r""" + Attributes: vpn_gateways (Sequence[google.cloud.compute_v1.types.VpnGateway]): [Output Only] A list of VPN gateways contained in this @@ -48021,6 +54866,8 @@ class VpnGatewaysScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning which replaces the list of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ vpn_gateways = proto.RepeatedField( @@ -48038,23 +54885,35 @@ class VpnTunnel(proto.Message): Attributes: creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. description (str): An optional description of this resource. Provide this property when you create the resource. + + This field is a member of `oneof`_ ``_description``. detailed_status (str): [Output Only] Detailed status message for the VPN tunnel. + + This field is a member of `oneof`_ ``_detailed_status``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. ike_version (int): IKE protocol version to use when establishing the VPN tunnel with the peer VPN gateway. Acceptable IKE versions are 1 or 2. The default version is 2. + + This field is a member of `oneof`_ ``_ike_version``. kind (str): [Output Only] Type of resource. Always compute#vpnTunnel for VPN tunnels. + + This field is a member of `oneof`_ ``_kind``. local_traffic_selector (Sequence[str]): Local traffic selector to use when establishing the VPN tunnel with the peer VPN @@ -48070,16 +54929,22 @@ class VpnTunnel(proto.Message): character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. peer_external_gateway (str): URL of the peer side external VPN gateway to which this VPN tunnel is connected. Provided by the client when the VPN tunnel is created. This field is exclusive with the field peerGcpGateway. + + This field is a member of `oneof`_ ``_peer_external_gateway``. peer_external_gateway_interface (int): The interface ID of the external VPN gateway to which this VPN tunnel is connected. Provided by the client when the VPN tunnel is created. + + This field is a member of `oneof`_ ``_peer_external_gateway_interface``. peer_gcp_gateway (str): URL of the peer side HA GCP VPN gateway to which this VPN tunnel is connected. Provided by @@ -48090,14 +54955,20 @@ class VpnTunnel(proto.Message): If provided, the VPN tunnel will automatically use the same vpnGatewayInterface ID in the peer GCP VPN gateway. + + This field is a member of `oneof`_ ``_peer_gcp_gateway``. peer_ip (str): IP address of the peer VPN gateway. Only IPv4 is supported. + + This field is a member of `oneof`_ ``_peer_ip``. region (str): [Output Only] URL of the region where the VPN tunnel resides. You must specify this field as part of the HTTP request URL. It is not settable as a field in the request body. + + This field is a member of `oneof`_ ``_region``. remote_traffic_selector (Sequence[str]): Remote traffic selectors to use when establishing the VPN tunnel with the peer VPN @@ -48107,14 +54978,22 @@ class VpnTunnel(proto.Message): router (str): URL of the router resource to be used for dynamic routing. 
+ + This field is a member of `oneof`_ ``_router``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. shared_secret (str): Shared secret used to set the secure session between the Cloud VPN gateway and the peer VPN gateway. + + This field is a member of `oneof`_ ``_shared_secret``. shared_secret_hash (str): Hash of the shared secret. + + This field is a member of `oneof`_ ``_shared_secret_hash``. status (google.cloud.compute_v1.types.VpnTunnel.Status): [Output Only] The status of the VPN tunnel, which can be one of the following: - PROVISIONING: Resource is being @@ -48140,18 +55019,26 @@ class VpnTunnel(proto.Message): PEER_IDENTITY_MISMATCH: Peer identity does not match peer IP, probably behind NAT. - TS_NARROWING_NOT_ALLOWED: Traffic selector narrowing not allowed for an HA-VPN tunnel. + + This field is a member of `oneof`_ ``_status``. target_vpn_gateway (str): URL of the Target VPN gateway with which this VPN tunnel is associated. Provided by the client when the VPN tunnel is created. + + This field is a member of `oneof`_ ``_target_vpn_gateway``. vpn_gateway (str): URL of the VPN gateway with which this VPN tunnel is associated. Provided by the client when the VPN tunnel is created. This must be used (instead of target_vpn_gateway) if a High Availability VPN gateway resource is created. + + This field is a member of `oneof`_ ``_vpn_gateway``. vpn_gateway_interface (int): The interface ID of the VPN gateway with which this VPN tunnel is associated. + + This field is a member of `oneof`_ ``_vpn_gateway_interface``. """ class Status(proto.Enum): @@ -48220,15 +55107,20 @@ class Status(proto.Enum): class VpnTunnelAggregatedList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.VpnTunnelAggregatedList.ItemsEntry]): A list of VpnTunnelsScopedList resources. kind (str): [Output Only] Type of resource. Always compute#vpnTunnel for VPN tunnels. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -48236,12 +55128,18 @@ class VpnTunnelAggregatedList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. unreachables (Sequence[str]): [Output Only] Unreachable resources. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -48263,15 +55161,20 @@ def raw_page(self): class VpnTunnelList(proto.Message): r"""Contains a list of VpnTunnel resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.VpnTunnel]): A list of VpnTunnel resources. kind (str): [Output Only] Type of resource. Always compute#vpnTunnel for VPN tunnels. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. 
If the number of results is @@ -48279,10 +55182,16 @@ class VpnTunnelList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -48301,6 +55210,7 @@ def raw_page(self): class VpnTunnelsScopedList(proto.Message): r""" + Attributes: vpn_tunnels (Sequence[google.cloud.compute_v1.types.VpnTunnel]): A list of VPN tunnels contained in this @@ -48308,6 +55218,8 @@ class VpnTunnelsScopedList(proto.Message): warning (google.cloud.compute_v1.types.Warning): Informational warning which replaces the list of addresses when the list is empty. + + This field is a member of `oneof`_ ``_warning``. """ vpn_tunnels = proto.RepeatedField( @@ -48320,6 +55232,7 @@ class VpnTunnelsScopedList(proto.Message): class WafExpressionSet(proto.Message): r""" + Attributes: aliases (Sequence[str]): A list of alternate IDs. The format should @@ -48333,6 +55246,8 @@ class WafExpressionSet(proto.Message): id (str): Google specified expression set ID. The format should be: - E.g. XSS-20170329 required + + This field is a member of `oneof`_ ``_id``. """ aliases = proto.RepeatedField(proto.STRING, number=159207166,) @@ -48344,6 +55259,7 @@ class WafExpressionSet(proto.Message): class WafExpressionSetExpression(proto.Message): r""" + Attributes: id (str): Expression ID should uniquely identify the @@ -48354,6 +55270,8 @@ class WafExpressionSetExpression(proto.Message): definition that has been detected. It could also be used to exclude it from the policy in case of false positive. required + + This field is a member of `oneof`_ ``_id``. """ id = proto.Field(proto.STRING, number=3355, optional=True,) @@ -48412,11 +55330,14 @@ class WaitZoneOperationRequest(proto.Message): class Warning(proto.Message): r"""[Output Only] Informational warning message. + Attributes: code (google.cloud.compute_v1.types.Warning.Code): [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response. + + This field is a member of `oneof`_ ``_code``. data (Sequence[google.cloud.compute_v1.types.Data]): [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key": "scope", "value": @@ -48424,6 +55345,8 @@ class Warning(proto.Message): message (str): [Output Only] A human-readable description of the warning code. + + This field is a member of `oneof`_ ``_message``. """ class Code(proto.Enum): @@ -48465,11 +55388,14 @@ class Code(proto.Enum): class Warnings(proto.Message): r""" + Attributes: code (google.cloud.compute_v1.types.Warnings.Code): [Output Only] A warning code, if applicable. For example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no results in the response. + + This field is a member of `oneof`_ ``_code``. data (Sequence[google.cloud.compute_v1.types.Data]): [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key": "scope", "value": @@ -48477,6 +55403,8 @@ class Warnings(proto.Message): message (str): [Output Only] A human-readable description of the warning code. + + This field is a member of `oneof`_ ``_message``. 
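Since the list responses above carry an optional informational ``warning``, a quick sketch of checking for it while paging (the project ID is hypothetical, and ``pages`` is assumed to be the standard pager surface):

    from google.cloud import compute_v1

    client = compute_v1.ZonesClient()
    for page in client.list(project="example-project").pages:
        # warning is only present when the API actually attached one.
        if "warning" in page:
            print(page.warning.code, page.warning.message)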
""" class Code(proto.Enum): @@ -48531,6 +55459,8 @@ class WeightedBackendService(proto.Message): request to backendService, the loadbalancer applies any relevant headerActions specified as part of this backendServiceWeight. + + This field is a member of `oneof`_ ``_backend_service``. header_action (google.cloud.compute_v1.types.HttpHeaderAction): Specifies changes to request and response headers that need to take effect for the @@ -48543,6 +55473,8 @@ class WeightedBackendService(proto.Message): supported when the URL map is bound to target gRPC proxy that has validateForProxyless field set to true. + + This field is a member of `oneof`_ ``_header_action``. weight (int): Specifies the fraction of traffic sent to backendService, computed as weight / (sum of all @@ -48554,6 +55486,8 @@ class WeightedBackendService(proto.Message): as determined by the BackendService's session affinity policy. The value must be between 0 and 1000 + + This field is a member of `oneof`_ ``_weight``. """ backend_service = proto.Field(proto.STRING, number=306946058, optional=True,) @@ -48565,15 +55499,20 @@ class WeightedBackendService(proto.Message): class XpnHostList(proto.Message): r""" + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Project]): [Output Only] A list of shared VPC host project URLs. kind (str): [Output Only] Type of resource. Always compute#xpnHostList for lists of shared VPC hosts. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -48581,10 +55520,16 @@ class XpnHostList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -48603,14 +55548,19 @@ def raw_page(self): class XpnResourceId(proto.Message): r"""Service resource (a.k.a service project) ID. + Attributes: id (str): The ID of the service resource. In the case of projects, this field supports project id (e.g., my-project-123) and project number (e.g. 12345678). + + This field is a member of `oneof`_ ``_id``. type_ (google.cloud.compute_v1.types.XpnResourceId.Type): The type of the service resource. + + This field is a member of `oneof`_ ``_type``. """ class Type(proto.Enum): @@ -48635,28 +55585,48 @@ class Zone(proto.Message): zone. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. + + This field is a member of `oneof`_ ``_creation_timestamp``. deprecated (google.cloud.compute_v1.types.DeprecationStatus): [Output Only] The deprecation status associated with this zone. + + This field is a member of `oneof`_ ``_deprecated``. description (str): [Output Only] Textual description of the resource. + + This field is a member of `oneof`_ ``_description``. id (int): [Output Only] The unique identifier for the resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. kind (str): [Output Only] Type of the resource. 
Always compute#zone for zones. + + This field is a member of `oneof`_ ``_kind``. name (str): [Output Only] Name of the resource. + + This field is a member of `oneof`_ ``_name``. region (str): [Output Only] Full URL reference to the region which hosts the zone. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. + + This field is a member of `oneof`_ ``_self_link``. status (google.cloud.compute_v1.types.Zone.Status): [Output Only] Status of the zone, either UP or DOWN. + + This field is a member of `oneof`_ ``_status``. supports_pzs (bool): [Output Only] Reserved for future use. + + This field is a member of `oneof`_ ``_supports_pzs``. """ class Status(proto.Enum): @@ -48682,14 +55652,19 @@ class Status(proto.Enum): class ZoneList(proto.Message): r"""Contains a list of zone resources. + Attributes: id (str): [Output Only] Unique identifier for the resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. items (Sequence[google.cloud.compute_v1.types.Zone]): A list of Zone resources. kind (str): Type of resource. + + This field is a member of `oneof`_ ``_kind``. next_page_token (str): [Output Only] This token allows you to get the next page of results for list requests. If the number of results is @@ -48697,10 +55672,16 @@ class ZoneList(proto.Message): the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. self_link (str): [Output Only] Server-defined URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. warning (google.cloud.compute_v1.types.Warning): [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. """ @property @@ -48719,6 +55700,7 @@ def raw_page(self): class ZoneSetLabelsRequest(proto.Message): r""" + Attributes: label_fingerprint (str): The fingerprint of the previous set of labels @@ -48729,6 +55711,8 @@ class ZoneSetLabelsRequest(proto.Message): to-date fingerprint hash in order to update or change labels. Make a get() request to the resource to get the latest fingerprint. + + This field is a member of `oneof`_ ``_label_fingerprint``. labels (Sequence[google.cloud.compute_v1.types.ZoneSetLabelsRequest.LabelsEntry]): The labels to set for this resource. """ @@ -48739,6 +55723,7 @@ class ZoneSetLabelsRequest(proto.Message): class ZoneSetPolicyRequest(proto.Message): r""" + Attributes: bindings (Sequence[google.cloud.compute_v1.types.Binding]): Flatten Policy to create a backwacd @@ -48748,12 +55733,16 @@ class ZoneSetPolicyRequest(proto.Message): Flatten Policy to create a backward compatible wire-format. Deprecated. Use 'policy' to specify the etag. + + This field is a member of `oneof`_ ``_etag``. policy (google.cloud.compute_v1.types.Policy): REQUIRED: The complete policy to be applied to the 'resource'. The size of the policy is limited to a few 10s of KB. An empty policy is in general a valid policy but certain services (like Projects) might reject them. + + This field is a member of `oneof`_ ``_policy``. """ bindings = proto.RepeatedField(proto.MESSAGE, number=403251854, message="Binding",) diff --git a/noxfile.py b/noxfile.py index eec515955..f041f1f5a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -175,7 +175,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=96") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -190,6 +190,7 @@ def docs(session): shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", + "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors "-b", diff --git a/owlbot.py b/owlbot.py new file mode 100644 index 000000000..b93f3c8a0 --- /dev/null +++ b/owlbot.py @@ -0,0 +1,63 @@ +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import synthtool as s +import synthtool.gcp as gcp +from synthtool.languages import python + +# ---------------------------------------------------------------------------- +# Copy the generated client from the owl-bot staging directory +# ---------------------------------------------------------------------------- + +default_version = "v1" + +for library in s.get_staging_dirs(default_version): + s.move(library, excludes=["setup.py", "README.rst"]) +s.remove_staging_dirs() + +# Work around gapic generator bug https://github.com/googleapis/gapic-generator-python/issues/1083 +s.replace( + "google/cloud/**/types/compute.py", + """A request message for InstanceGroupManagers.AbandonInstances. + See the method description for details.\n + Attributes""", + """A request message for InstanceGroupManagers.AbandonInstances. + See the method description for details.\n + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields\n + Attributes""" +) + +# Work around formatting issues with docstrings +s.replace("google/cloud/**/types/compute.py", """\"IT_\"""", """`IT_`""") +s.replace("google/cloud/**/types/compute.py", """\"NS_\"""", """`NS_`""") + +# ---------------------------------------------------------------------------- +# Add templated files +# ---------------------------------------------------------------------------- + +templated_files = gcp.CommonTemplates().py_library( + microgenerator=True, + cov_level=98, + versions=gcp.common.detect_versions(path="./google", default_first=True), +) + +s.move(templated_files, excludes=[".coveragerc"]) # the microgenerator has a good coveragerc file + +python.py_samples(skip_readmes=True) + +# ---------------------------------------------------------------------------- +# Run blacken session +# ---------------------------------------------------------------------------- + +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/samples/snippets/sample_default_values.py b/samples/snippets/sample_default_values.py index 53dbaf62d..40f07ea1d 100644 --- a/samples/snippets/sample_default_values.py +++ b/samples/snippets/sample_default_values.py @@ -112,10 +112,10 @@ def disable_usage_export(project_id: str) -> None: """ projects_client = compute_v1.ProjectsClient() - # Updating the setting with None will disable the - # usage report generation. 
+ # Setting `usage_export_location_resource` to an + # empty object will disable the usage report generation. operation = projects_client.set_usage_export_bucket( - project=project_id, usage_export_location_resource=None) + project=project_id, usage_export_location_resource={}) op_client = compute_v1.GlobalOperationsClient() diff --git a/samples/snippets/test_sample_default_values.py b/samples/snippets/test_sample_default_values.py index 613c6efa3..16216101c 100644 --- a/samples/snippets/test_sample_default_values.py +++ b/samples/snippets/test_sample_default_values.py @@ -51,9 +51,8 @@ def test_set_usage_export_bucket_default(capsys: typing.Any, assert(uel.bucket_name == '') assert(uel.report_name_prefix == '') - -def test_set_usage_export_bucket_custom(capsys: typing.Any, - temp_bucket: storage.Bucket) -> None: + # Testing setting a custom export bucket. Keeping this in one test function + # to avoid race conditions, as this is a global setting for the project. set_usage_export_bucket(project_id=PROJECT, bucket_name=temp_bucket.name, report_name_prefix=TEST_PREFIX) time.sleep(5) # To make sure the settings are properly updated diff --git a/scripts/fixup_compute_v1_keywords.py b/scripts/fixup_compute_v1_keywords.py new file mode 100644 index 000000000..1be5ad369 --- /dev/null +++ b/scripts/fixup_compute_v1_keywords.py @@ -0,0 +1,303 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
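The sample change above replaces `usage_export_location_resource=None` with an empty object, which is what actually clears a project's usage export setting. A minimal sketch of that disable flow, not part of the change itself and roughly mirroring the updated sample (the project ID is a placeholder; the wait call assumes the usual compute_v1 global-operation polling):

from google.cloud import compute_v1

def disable_usage_export_example(project_id: str) -> None:
    # Passing an empty UsageExportLocation (rather than None) tells the API
    # to clear the usage export bucket configuration for the project.
    projects_client = compute_v1.ProjectsClient()
    operation = projects_client.set_usage_export_bucket(
        project=project_id, usage_export_location_resource={}
    )
    # Block until the global operation finishes before relying on the new setting.
    op_client = compute_v1.GlobalOperationsClient()
    op_client.wait(project=project_id, operation=operation.name)

disable_usage_export_example("my-project")  # placeholder project ID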
+# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class computeCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'abandon_instances': ('instance_group_manager', 'instance_group_managers_abandon_instances_request_resource', 'project', 'zone', 'request_id', ), + 'add_access_config': ('access_config_resource', 'instance', 'network_interface', 'project', 'zone', 'request_id', ), + 'add_association': ('firewall_policy', 'firewall_policy_association_resource', 'replace_existing_association', 'request_id', ), + 'add_health_check': ('project', 'region', 'target_pool', 'target_pools_add_health_check_request_resource', 'request_id', ), + 'add_instance': ('project', 'region', 'target_pool', 'target_pools_add_instance_request_resource', 'request_id', ), + 'add_instances': ('instance_group', 'instance_groups_add_instances_request_resource', 'project', 'zone', 'request_id', ), + 'add_nodes': ('node_group', 'node_groups_add_nodes_request_resource', 'project', 'zone', 'request_id', ), + 'add_peering': ('network', 'networks_add_peering_request_resource', 'project', 'request_id', ), + 'add_resource_policies': ('disk', 'disks_add_resource_policies_request_resource', 'project', 'zone', 'request_id', ), + 'add_rule': ('firewall_policy', 'firewall_policy_rule_resource', 'request_id', ), + 'add_signed_url_key': ('backend_bucket', 'project', 'signed_url_key_resource', 'request_id', ), + 'aggregated_list': ('project', 'filter', 'include_all_scopes', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'apply_updates_to_instances': ('instance_group_manager', 'instance_group_managers_apply_updates_request_resource', 'project', 'zone', ), + 'attach_disk': ('attached_disk_resource', 'instance', 'project', 'zone', 'force_attach', 'request_id', ), + 'attach_network_endpoints': ('global_network_endpoint_groups_attach_endpoints_request_resource', 'network_endpoint_group', 'project', 'request_id', ), + 'bulk_insert': ('bulk_insert_instance_resource_resource', 'project', 'zone', 'request_id', ), + 'clone_rules': ('firewall_policy', 'request_id', 'source_firewall_policy', ), + 'create_instances': ('instance_group_manager', 'instance_group_managers_create_instances_request_resource', 'project', 'zone', 'request_id', ), + 'create_snapshot': ('disk', 'project', 'snapshot_resource', 'zone', 'guest_flush', 'request_id', ), + 'delete': ('address', 'project', 'region', 'request_id', ), + 'delete_access_config': ('access_config', 'instance', 'network_interface', 'project', 'zone', 'request_id', ), + 'delete_instances': ('instance_group_manager', 'instance_group_managers_delete_instances_request_resource', 'project', 'zone', 'request_id', ), + 'delete_nodes': ('node_group', 'node_groups_delete_nodes_request_resource', 'project', 'zone', 'request_id', ), + 'delete_per_instance_configs': ('instance_group_manager', 'instance_group_managers_delete_per_instance_configs_req_resource', 'project', 'zone', ), + 'delete_signed_url_key': ('backend_bucket', 'key_name', 'project', 'request_id', ), + 'deprecate': 
('deprecation_status_resource', 'image', 'project', 'request_id', ), + 'detach_disk': ('device_name', 'instance', 'project', 'zone', 'request_id', ), + 'detach_network_endpoints': ('global_network_endpoint_groups_detach_endpoints_request_resource', 'network_endpoint_group', 'project', 'request_id', ), + 'disable_xpn_host': ('project', 'request_id', ), + 'disable_xpn_resource': ('project', 'projects_disable_xpn_resource_request_resource', 'request_id', ), + 'enable_xpn_host': ('project', 'request_id', ), + 'enable_xpn_resource': ('project', 'projects_enable_xpn_resource_request_resource', 'request_id', ), + 'expand_ip_cidr_range': ('project', 'region', 'subnetwork', 'subnetworks_expand_ip_cidr_range_request_resource', 'request_id', ), + 'get': ('accelerator_type', 'project', 'zone', ), + 'get_association': ('firewall_policy', 'name', ), + 'get_diagnostics': ('interconnect', 'project', ), + 'get_effective_firewalls': ('instance', 'network_interface', 'project', 'zone', ), + 'get_from_family': ('family', 'project', ), + 'get_guest_attributes': ('instance', 'project', 'zone', 'query_path', 'variable_key', ), + 'get_health': ('backend_service', 'project', 'resource_group_reference_resource', ), + 'get_iam_policy': ('project', 'resource', 'zone', 'options_requested_policy_version', ), + 'get_nat_mapping_info': ('project', 'region', 'router', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'get_router_status': ('project', 'region', 'router', ), + 'get_rule': ('firewall_policy', 'priority', ), + 'get_screenshot': ('instance', 'project', 'zone', ), + 'get_serial_port_output': ('instance', 'project', 'zone', 'port', 'start', ), + 'get_shielded_instance_identity': ('instance', 'project', 'zone', ), + 'get_status': ('project', 'region', 'vpn_gateway', ), + 'get_xpn_host': ('project', ), + 'get_xpn_resources': ('project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'insert': ('address_resource', 'project', 'region', 'request_id', ), + 'invalidate_cache': ('cache_invalidation_rule_resource', 'project', 'url_map', 'request_id', ), + 'list': ('project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_associations': ('target_resource', ), + 'list_available_features': ('project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_errors': ('instance_group_manager', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_instances': ('instance_group', 'instance_groups_list_instances_request_resource', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_managed_instances': ('instance_group_manager', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_network_endpoints': ('network_endpoint_group', 'project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_nodes': ('node_group', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_peering_routes': ('network', 'project', 'direction', 'filter', 'max_results', 'order_by', 'page_token', 'peering_name', 'region', 'return_partial_success', ), + 'list_per_instance_configs': ('instance_group_manager', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_preconfigured_expression_sets': ('project', 'filter', 
'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_referrers': ('instance', 'project', 'zone', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_usable': ('project', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'list_xpn_hosts': ('project', 'projects_list_xpn_hosts_request_resource', 'filter', 'max_results', 'order_by', 'page_token', 'return_partial_success', ), + 'move': ('firewall_policy', 'parent_id', 'request_id', ), + 'move_disk': ('disk_move_request_resource', 'project', 'request_id', ), + 'move_instance': ('instance_move_request_resource', 'project', 'request_id', ), + 'patch': ('autoscaler_resource', 'project', 'zone', 'autoscaler', 'request_id', ), + 'patch_per_instance_configs': ('instance_group_manager', 'instance_group_managers_patch_per_instance_configs_req_resource', 'project', 'zone', 'request_id', ), + 'patch_rule': ('firewall_policy', 'firewall_policy_rule_resource', 'priority', 'request_id', ), + 'preview': ('project', 'region', 'router', 'router_resource', ), + 'recreate_instances': ('instance_group_manager', 'instance_group_managers_recreate_instances_request_resource', 'project', 'zone', 'request_id', ), + 'remove_association': ('firewall_policy', 'name', 'request_id', ), + 'remove_health_check': ('project', 'region', 'target_pool', 'target_pools_remove_health_check_request_resource', 'request_id', ), + 'remove_instance': ('project', 'region', 'target_pool', 'target_pools_remove_instance_request_resource', 'request_id', ), + 'remove_instances': ('instance_group', 'instance_groups_remove_instances_request_resource', 'project', 'zone', 'request_id', ), + 'remove_peering': ('network', 'networks_remove_peering_request_resource', 'project', 'request_id', ), + 'remove_resource_policies': ('disk', 'disks_remove_resource_policies_request_resource', 'project', 'zone', 'request_id', ), + 'remove_rule': ('firewall_policy', 'priority', 'request_id', ), + 'reset': ('instance', 'project', 'zone', 'request_id', ), + 'resize': ('disk', 'disks_resize_request_resource', 'project', 'zone', 'request_id', ), + 'send_diagnostic_interrupt': ('instance', 'project', 'zone', ), + 'set_backend_service': ('project', 'target_ssl_proxies_set_backend_service_request_resource', 'target_ssl_proxy', 'request_id', ), + 'set_backup': ('project', 'region', 'target_pool', 'target_reference_resource', 'failover_ratio', 'request_id', ), + 'set_common_instance_metadata': ('metadata_resource', 'project', 'request_id', ), + 'set_default_network_tier': ('project', 'projects_set_default_network_tier_request_resource', 'request_id', ), + 'set_deletion_protection': ('project', 'resource', 'zone', 'deletion_protection', 'request_id', ), + 'set_disk_auto_delete': ('auto_delete', 'device_name', 'instance', 'project', 'zone', 'request_id', ), + 'set_iam_policy': ('project', 'resource', 'zone', 'zone_set_policy_request_resource', ), + 'set_instance_template': ('instance_group_manager', 'instance_group_managers_set_instance_template_request_resource', 'project', 'zone', 'request_id', ), + 'set_labels': ('project', 'resource', 'zone', 'zone_set_labels_request_resource', 'request_id', ), + 'set_machine_resources': ('instance', 'instances_set_machine_resources_request_resource', 'project', 'zone', 'request_id', ), + 'set_machine_type': ('instance', 'instances_set_machine_type_request_resource', 'project', 'zone', 'request_id', ), + 'set_metadata': ('instance', 'metadata_resource', 'project', 'zone', 'request_id', ), + 
'set_min_cpu_platform': ('instance', 'instances_set_min_cpu_platform_request_resource', 'project', 'zone', 'request_id', ), + 'set_named_ports': ('instance_group', 'instance_groups_set_named_ports_request_resource', 'project', 'zone', 'request_id', ), + 'set_node_template': ('node_group', 'node_groups_set_node_template_request_resource', 'project', 'zone', 'request_id', ), + 'set_private_ip_google_access': ('project', 'region', 'subnetwork', 'subnetworks_set_private_ip_google_access_request_resource', 'request_id', ), + 'set_proxy_header': ('project', 'target_ssl_proxies_set_proxy_header_request_resource', 'target_ssl_proxy', 'request_id', ), + 'set_quic_override': ('project', 'target_https_proxies_set_quic_override_request_resource', 'target_https_proxy', 'request_id', ), + 'set_scheduling': ('instance', 'project', 'scheduling_resource', 'zone', 'request_id', ), + 'set_security_policy': ('backend_service', 'project', 'security_policy_reference_resource', 'request_id', ), + 'set_service_account': ('instance', 'instances_set_service_account_request_resource', 'project', 'zone', 'request_id', ), + 'set_shielded_instance_integrity_policy': ('instance', 'project', 'shielded_instance_integrity_policy_resource', 'zone', 'request_id', ), + 'set_ssl_certificates': ('project', 'region', 'region_target_https_proxies_set_ssl_certificates_request_resource', 'target_https_proxy', 'request_id', ), + 'set_ssl_policy': ('project', 'ssl_policy_reference_resource', 'target_https_proxy', 'request_id', ), + 'set_tags': ('instance', 'project', 'tags_resource', 'zone', 'request_id', ), + 'set_target': ('forwarding_rule', 'project', 'region', 'target_reference_resource', 'request_id', ), + 'set_target_pools': ('instance_group_manager', 'instance_group_managers_set_target_pools_request_resource', 'project', 'zone', 'request_id', ), + 'set_url_map': ('project', 'region', 'target_http_proxy', 'url_map_reference_resource', 'request_id', ), + 'set_usage_export_bucket': ('project', 'usage_export_location_resource', 'request_id', ), + 'simulate_maintenance_event': ('instance', 'project', 'zone', ), + 'start': ('instance', 'project', 'zone', 'request_id', ), + 'start_with_encryption_key': ('instance', 'instances_start_with_encryption_key_request_resource', 'project', 'zone', 'request_id', ), + 'stop': ('instance', 'project', 'zone', 'request_id', ), + 'switch_to_custom_mode': ('network', 'project', 'request_id', ), + 'test_iam_permissions': ('project', 'resource', 'test_permissions_request_resource', 'zone', ), + 'update': ('autoscaler_resource', 'project', 'zone', 'autoscaler', 'request_id', ), + 'update_access_config': ('access_config_resource', 'instance', 'network_interface', 'project', 'zone', 'request_id', ), + 'update_display_device': ('display_device_resource', 'instance', 'project', 'zone', 'request_id', ), + 'update_network_interface': ('instance', 'network_interface', 'network_interface_resource', 'project', 'zone', 'request_id', ), + 'update_peering': ('network', 'networks_update_peering_request_resource', 'project', 'request_id', ), + 'update_per_instance_configs': ('instance_group_manager', 'instance_group_managers_update_per_instance_configs_req_resource', 'project', 'zone', 'request_id', ), + 'update_shielded_instance_config': ('instance', 'project', 'shielded_instance_config_resource', 'zone', 'request_id', ), + 'validate': ('project', 'region', 'region_url_maps_validate_request_resource', 'url_map', ), + 'wait': ('operation', 'project', ), + } + + def leave_Call(self, original: cst.Call, updated: 
cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=computeCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the compute client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index 5042d98a7..5e778c1fc 100644 --- a/setup.py +++ b/setup.py @@ -44,12 +44,8 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - # NOTE: Maintainers, please do not require google-api-core>=2.x.x - # Until this issue is closed - # https://github.com/googleapis/google-cloud-python/issues/10566 - "google-api-core[grpc] >= 1.28.0, <3.0.0dev", - "proto-plus >= 1.13.0", - "packaging >= 14.3", + "google-api-core[grpc] >= 2.2.0, <3.0.0dev", + "proto-plus >= 1.19.7", ), python_requires=">=3.6", classifiers=[ diff --git a/synth.py b/synth.py deleted file mode 100644 index a6eb56c07..000000000 --- a/synth.py +++ /dev/null @@ -1,62 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""This script is used to synthesize generated parts of this library.""" - -import os - -import synthtool as s -import synthtool.gcp as gcp -from synthtool.languages import python - -gapic = gcp.GAPICBazel() -common = gcp.CommonTemplates() - -# ---------------------------------------------------------------------------- -# Generate Compute Engine GAPIC layer -# ---------------------------------------------------------------------------- -versions = ["v1"] -for version in versions: - library = gapic.py_library( - service="compute", - version="v1", - bazel_target="//google/cloud/compute/v1:compute-v1-py", - diregapic=True, - ) - s.move(library, excludes=["setup.py", "README.rst", "docs/index.rst", "docs/multiprocessing.rst", f"scripts/fixup_compute_{version}_keywords.py"]) - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- - -templated_files = common.py_library(cov_level=96, microgenerator=True) -s.move( - templated_files, excludes=[".coveragerc"] # the microgenerator has a good coveragerc file -) - -# -------------------------------------------------------------------------- -# Samples templates -# -------------------------------------------------------------------------- -python.py_samples() - -# Don't treat docs warnings as errors -# A few errors like: -# docstring of google.cloud.compute_v1.types.compute.InterconnectOutageNotification:31: WARNING: Unknown target name: "it". -s.replace( - "noxfile.py", - '''['"]-W['"].*''', - "", -) - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index e93042f1a..2eaf5a509 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,6 +5,5 @@ # e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", # Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.28.0 -proto-plus==1.13.0 -packaging==14.3 +google-api-core==2.2.0 +proto-plus==1.19.7 diff --git a/tests/system/test_smoke.py b/tests/system/test_smoke.py index 0668cea6c..5d6f15f2a 100644 --- a/tests/system/test_smoke.py +++ b/tests/system/test_smoke.py @@ -63,9 +63,7 @@ def test_aggregated_list(self): self.assertTrue(presented) def test_client_error(self): - with self.assertRaises( - expected_exception=google.api_core.exceptions.BadRequest - ): + with self.assertRaises(expected_exception=ValueError): self.client.get(instance=self.name, zone=self.DEFAULT_ZONE) def test_api_error(self): diff --git a/tests/unit/gapic/compute_v1/test_accelerator_types.py b/tests/unit/gapic/compute_v1/test_accelerator_types.py index c9ac506a0..db47e9b6d 100644 --- a/tests/unit/gapic/compute_v1/test_accelerator_types.py +++ b/tests/unit/gapic/compute_v1/test_accelerator_types.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.accelerator_types 
import AcceleratorTypesClient from google.cloud.compute_v1.services.accelerator_types import pagers from google.cloud.compute_v1.services.accelerator_types import transports -from google.cloud.compute_v1.services.accelerator_types.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_accelerator_types_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_accelerator_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_accelerator_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_accelerator_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_accelerator_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_accelerator_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_accelerator_types_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) 
patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_accelerator_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_accelerator_types_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_accelerator_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_accelerator_types_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_accelerator_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -369,7 +359,7 @@ def test_accelerator_types_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_accelerator_types_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_accelerator_types_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_accelerator_types_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_accelerator_types_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_accelerator_types_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,35 +427,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the 
runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.AcceleratorTypeAggregatedList( id="id_value", - items={ - "key_value": compute.AcceleratorTypesScopedList( - accelerator_types=[ - compute.AcceleratorType( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.AcceleratorTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AcceleratorTypeAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -470,26 +453,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.AcceleratorTypesScopedList( - accelerator_types=[ - compute.AcceleratorType(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListAcceleratorTypesRequest +): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -497,27 +497,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.AcceleratorTypeAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.AcceleratorTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AcceleratorTypeAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/acceleratorTypes" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -527,11 +536,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.AcceleratorTypeAggregatedList( @@ -567,10 +578,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.AcceleratorTypesScopedList) assert pager.get("h") is None @@ -588,7 +598,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.AcceleratorTypesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -600,16 +610,19 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "accelerator_type": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.AcceleratorType( creation_timestamp="creation_timestamp_value", - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", id=205, kind="kind_value", @@ -620,9 +633,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.AcceleratorType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AcceleratorType.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -630,7 +643,6 @@ def test_get_rest( # Establish that the response is the type that we expect. assert isinstance(response, compute.AcceleratorType) assert response.creation_timestamp == "creation_timestamp_value" - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.id == 205 assert response.kind == "kind_value" @@ -640,12 +652,41 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetAcceleratorTypeRequest +): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "accelerator_type": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -653,33 +694,44 @@ def test_get_rest_flattened(): return_value = compute.AcceleratorType() # Wrap the value into a proper Response obj - json_return_value = compute.AcceleratorType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AcceleratorType.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "accelerator_type": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", accelerator_type="accelerator_type_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "accelerator_type_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes/{accelerator_type}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -699,28 +751,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.AcceleratorTypeList( id="id_value", - items=[ - compute.AcceleratorType(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.AcceleratorTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AcceleratorTypeList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -728,21 +776,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.AcceleratorType(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListAcceleratorTypesRequest +): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -750,30 +819,36 @@ def test_list_rest_flattened(): return_value = compute.AcceleratorTypeList() # Wrap the value into a proper Response obj - json_return_value = compute.AcceleratorTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AcceleratorTypeList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/acceleratorTypes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -785,11 +860,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = AcceleratorTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.AcceleratorTypeList( @@ -819,16 +896,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.AcceleratorType) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -911,8 +987,10 @@ def test_accelerator_types_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_accelerator_types_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -937,30 +1015,6 @@ def test_accelerator_types_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_accelerator_types_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.accelerator_types.transports.AcceleratorTypesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AcceleratorTypesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_accelerator_types_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -972,7 +1026,6 @@ def test_accelerator_types_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_accelerator_types_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -989,22 +1042,6 @@ def test_accelerator_types_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_accelerator_types_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AcceleratorTypesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_accelerator_types_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1151,3 +1188,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = AcceleratorTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_addresses.py b/tests/unit/gapic/compute_v1/test_addresses.py index 1f067fa88..9f0e2cab1 100644 --- a/tests/unit/gapic/compute_v1/test_addresses.py +++ b/tests/unit/gapic/compute_v1/test_addresses.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.addresses import AddressesClient from google.cloud.compute_v1.services.addresses import pagers from google.cloud.compute_v1.services.addresses import transports -from google.cloud.compute_v1.services.addresses.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,7 +169,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,6 +178,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -201,7 +186,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -210,6 +195,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -217,7 +203,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -226,6 +212,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,7 +232,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,6 +241,7 @@ def test_addresses_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -284,7 +272,7 @@ def test_addresses_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as 
patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -301,6 +289,7 @@ def test_addresses_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -325,7 +314,7 @@ def test_addresses_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -334,6 +323,7 @@ def test_addresses_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -346,7 +336,7 @@ def test_addresses_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +345,7 @@ def test_addresses_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -369,7 +360,7 @@ def test_addresses_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +369,7 @@ def test_addresses_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +384,7 @@ def test_addresses_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -401,6 +393,7 @@ def test_addresses_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -411,31 +404,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.AddressAggregatedList( id="id_value", - items={ - "key_value": compute.AddressesScopedList( - addresses=[compute.Address(address="address_value")] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.AddressAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AddressAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -443,24 +430,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.AddressesScopedList( - addresses=[compute.Address(address="address_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListAddressesRequest +): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -468,27 +474,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.AddressAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.AddressAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AddressAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/addresses" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -498,11 +513,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.AddressAggregatedList( @@ -535,10 +552,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.AddressesScopedList) assert pager.get("h") is None @@ -553,7 +569,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.AddressesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -565,9 +581,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "address": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -577,7 +593,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -595,14 +610,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -613,7 +627,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -631,18 +644,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteAddressRequest +): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "address": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -650,31 +685,42 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "address": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", address="address_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "address_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/addresses/{address}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -692,9 +738,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetAddressReques credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "address": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -720,9 +766,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetAddressReques ) # Wrap the value into a proper Response obj - json_return_value = compute.Address.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Address.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -748,12 +794,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetAddressReques assert response.users == ["users_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetAddressRequest +): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "address": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -761,31 +832,42 @@ def test_get_rest_flattened(): return_value = compute.Address() # Wrap the value into a proper Response obj - json_return_value = compute.Address.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Address.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "address": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", address="address_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "address_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/addresses/{address}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -805,9 +887,10 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["address_resource"] = compute.Address(address="address_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -817,7 +900,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -835,14 +917,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -853,7 +934,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -871,18 +951,41 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertAddressRequest +): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["address_resource"] = compute.Address(address="address_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -890,38 +993,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- address_resource = compute.Address(address="address_value") - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - address_resource=address_resource, + address_resource=compute.Address(address="address_value"), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Address.to_json( - address_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/addresses" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -939,26 +1044,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListAddressesRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.AddressList( id="id_value", - items=[compute.Address(address="address_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.AddressList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AddressList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -966,19 +1069,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListAddressesRe # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.Address(address="address_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListAddressesRequest +): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -986,30 +1112,36 @@ def test_list_rest_flattened(): return_value = compute.AddressList() # Wrap the value into a proper Response obj - json_return_value = compute.AddressList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AddressList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/addresses" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
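As an aside, a small standalone sketch of the request_init pattern these rewritten tests use ("send a request that will satisfy transcoding"); the field values are placeholders and the snippet is not part of the change. Proto-plus request messages accept a plain mapping, so a dict carrying only the URL path fields is enough for the REST transport to transcode the request.

from google.cloud.compute_v1.types import compute

request_init = {"project": "sample1", "region": "sample2"}
request = compute.ListAddressesRequest(request_init)

# Only the path parameters need real values; the remaining fields keep
# their proto3 defaults, which still satisfies the http rule for list().
assert request.project == "sample1"
assert request.region == "sample2"
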
@@ -1021,11 +1153,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = AddressesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.AddressList( @@ -1047,16 +1181,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Address) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1141,8 +1274,10 @@ def test_addresses_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_addresses_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1166,29 +1301,6 @@ def test_addresses_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_addresses_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.addresses.transports.AddressesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AddressesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_addresses_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1200,7 +1312,6 @@ def test_addresses_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_addresses_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1216,21 +1327,6 @@ def test_addresses_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_addresses_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AddressesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_addresses_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1377,3 +1473,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = AddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_autoscalers.py b/tests/unit/gapic/compute_v1/test_autoscalers.py index ec7777b4f..e0acbd92e 100644 --- a/tests/unit/gapic/compute_v1/test_autoscalers.py +++ b/tests/unit/gapic/compute_v1/test_autoscalers.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.autoscalers import AutoscalersClient from google.cloud.compute_v1.services.autoscalers import pagers from google.cloud.compute_v1.services.autoscalers import transports -from google.cloud.compute_v1.services.autoscalers.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_autoscalers_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_autoscalers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_autoscalers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_autoscalers_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_autoscalers_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def test_autoscalers_client_mtls_env_auto( 
client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_autoscalers_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_autoscalers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_autoscalers_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_autoscalers_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_autoscalers_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_autoscalers_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_autoscalers_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_autoscalers_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,37 +408,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.AutoscalerAggregatedList( id="id_value", - items={ - "key_value": compute.AutoscalersScopedList( - autoscalers=[ - compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy( - cool_down_period_sec=2112 - ) - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.AutoscalerAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AutoscalerAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -453,30 +434,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.AutoscalersScopedList( - autoscalers=[ - compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy( - cool_down_period_sec=2112 - ) - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListAutoscalersRequest +): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -484,27 +478,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.AutoscalerAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.AutoscalerAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AutoscalerAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/autoscalers" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -514,11 +517,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.AutoscalerAggregatedList( @@ -551,10 +556,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.AutoscalersScopedList) assert pager.get("h") is None @@ -572,7 +576,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.AutoscalersScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -584,9 +588,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -596,7 +600,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -614,14 +617,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -632,7 +634,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -650,18 +651,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteAutoscalerRequest +): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -669,31 +692,42 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "autoscaler": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", autoscaler="autoscaler_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "autoscaler_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -711,15 +745,14 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetAutoscalerReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112), creation_timestamp="creation_timestamp_value", description="description_value", id=205, @@ -727,31 +760,22 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetAutoscalerReq name="name_value", recommended_size=1693, region="region_value", - scaling_schedule_status={ - "key_value": compute.ScalingScheduleStatus( - last_start_time="last_start_time_value" - ) - }, self_link="self_link_value", status=compute.Autoscaler.Status.ACTIVE, - status_details=[compute.AutoscalerStatusDetails(message="message_value")], target="target_value", zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Autoscaler.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Autoscaler.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Autoscaler) - assert response.autoscaling_policy == compute.AutoscalingPolicy( - cool_down_period_sec=2112 - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.id == 205 @@ -759,26 +783,43 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetAutoscalerReq assert response.name == "name_value" assert response.recommended_size == 1693 assert response.region == "region_value" - assert response.scaling_schedule_status == { - "key_value": compute.ScalingScheduleStatus( - last_start_time="last_start_time_value" - ) - } assert response.self_link == "self_link_value" assert response.status == compute.Autoscaler.Status.ACTIVE - assert response.status_details == [ - compute.AutoscalerStatusDetails(message="message_value") - ] assert response.target == "target_value" assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetAutoscalerRequest +): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -786,31 +827,42 @@ def test_get_rest_flattened(): return_value = compute.Autoscaler() # Wrap the value into a proper Response obj - json_return_value = compute.Autoscaler.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Autoscaler.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "autoscaler": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", autoscaler="autoscaler_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "autoscaler_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/autoscalers/{autoscaler}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -830,9 +882,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -842,7 +897,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -860,14 +914,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -878,7 +931,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -896,18 +948,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertAutoscalerRequest +): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + 
request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -915,40 +992,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - autoscaler_resource = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - autoscaler_resource=autoscaler_resource, + autoscaler_resource=compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.Autoscaler.to_json( - autoscaler_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -970,32 +1049,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.AutoscalerList( id="id_value", - items=[ - compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy( - cool_down_period_sec=2112 - ) - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.AutoscalerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AutoscalerList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1003,23 +1074,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListAutoscalersRequest +): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1027,30 +1117,36 @@ def test_list_rest_flattened(): return_value = compute.AutoscalerList() # Wrap the value into a proper Response obj - json_return_value = compute.AutoscalerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AutoscalerList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1062,11 +1158,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.AutoscalerList( @@ -1096,16 +1194,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Autoscaler) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1117,9 +1214,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1129,7 +1229,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1147,14 +1246,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1165,7 +1263,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1183,18 +1280,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchAutoscalerRequest +): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1202,40 +1324,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - autoscaler_resource = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - autoscaler_resource=autoscaler_resource, + autoscaler_resource=compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.Autoscaler.to_json( - autoscaler_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1257,9 +1381,12 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1269,7 +1396,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1287,14 +1413,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1305,7 +1430,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1323,18 +1447,43 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateAutoscalerRequest +): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1342,40 +1491,42 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- autoscaler_resource = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - autoscaler_resource=autoscaler_resource, + autoscaler_resource=compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.Autoscaler.to_json( - autoscaler_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = AutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/autoscalers" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1472,8 +1623,10 @@ def test_autoscalers_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_autoscalers_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1497,29 +1650,6 @@ def test_autoscalers_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_autoscalers_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.autoscalers.transports.AutoscalersTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.AutoscalersTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_autoscalers_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1531,7 +1661,6 @@ def test_autoscalers_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_autoscalers_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1547,21 +1676,6 @@ def test_autoscalers_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_autoscalers_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AutoscalersClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_autoscalers_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1708,3 +1822,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = AutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_backend_buckets.py b/tests/unit/gapic/compute_v1/test_backend_buckets.py index 520af50d5..794730e95 100644 --- a/tests/unit/gapic/compute_v1/test_backend_buckets.py +++ b/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.backend_buckets import BackendBucketsClient from google.cloud.compute_v1.services.backend_buckets import pagers from google.cloud.compute_v1.services.backend_buckets import transports -from google.cloud.compute_v1.services.backend_buckets.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_backend_buckets_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_backend_buckets_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_backend_buckets_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_backend_buckets_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_backend_buckets_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_backend_buckets_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_backend_buckets_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_backend_buckets_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -296,7 +284,7 @@ def test_backend_buckets_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -313,6 +301,7 @@ def 
test_backend_buckets_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -337,7 +326,7 @@ def test_backend_buckets_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -346,6 +335,7 @@ def test_backend_buckets_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -358,7 +348,7 @@ def test_backend_buckets_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -367,6 +357,7 @@ def test_backend_buckets_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -381,7 +372,7 @@ def test_backend_buckets_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -390,6 +381,7 @@ def test_backend_buckets_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -404,7 +396,7 @@ def test_backend_buckets_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -413,6 +405,7 @@ def test_backend_buckets_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -423,9 +416,12 @@ def test_add_signed_url_key_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request_init["signed_url_key_resource"] = compute.SignedUrlKey( + key_name="key_name_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -435,7 +431,6 @@ def test_add_signed_url_key_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -453,14 +448,13 @@ def test_add_signed_url_key_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_signed_url_key(request) @@ -471,7 +465,6 @@ def test_add_signed_url_key_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -489,18 +482,43 @@ def test_add_signed_url_key_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_signed_url_key_rest_bad_request( + transport: str = "rest", request_type=compute.AddSignedUrlKeyBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request_init["signed_url_key_resource"] = compute.SignedUrlKey( + key_name="key_name_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_signed_url_key(request) + + def test_add_signed_url_key_rest_from_dict(): test_add_signed_url_key_rest(request_type=dict) -def test_add_signed_url_key_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_signed_url_key_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -508,38 +526,40 @@ def test_add_signed_url_key_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - signed_url_key_resource = compute.SignedUrlKey(key_name="key_name_value") - client.add_signed_url_key( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_bucket": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_bucket="backend_bucket_value", - signed_url_key_resource=signed_url_key_resource, + signed_url_key_resource=compute.SignedUrlKey(key_name="key_name_value"), ) + mock_args.update(sample_request) + client.add_signed_url_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_bucket_value" in http_call[1] + str(body) + str(params) - assert compute.SignedUrlKey.to_json( - signed_url_key_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_signed_url_key_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/addSignedUrlKey" + % client.transport._host, + args[1], + ) + + +def test_add_signed_url_key_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -559,9 +579,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -571,7 +591,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -589,14 +608,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -607,7 +625,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -625,18 +642,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -644,30 +683,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_bucket": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_bucket="backend_bucket_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_bucket_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -686,9 +733,9 @@ def test_delete_signed_url_key_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -698,7 +745,6 @@ def test_delete_signed_url_key_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -716,14 +762,13 @@ def test_delete_signed_url_key_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_signed_url_key(request) @@ -734,7 +779,6 @@ def test_delete_signed_url_key_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -752,18 +796,40 @@ def test_delete_signed_url_key_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_signed_url_key_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteSignedUrlKeyBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_signed_url_key(request) + + def test_delete_signed_url_key_rest_from_dict(): test_delete_signed_url_key_rest(request_type=dict) -def test_delete_signed_url_key_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_signed_url_key_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -771,33 +837,40 @@ def test_delete_signed_url_key_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_signed_url_key( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_bucket": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_bucket="backend_bucket_value", key_name="key_name_value", ) + mock_args.update(sample_request) + client.delete_signed_url_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_bucket_value" in http_call[1] + str(body) + str(params) - assert "key_name_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}/deleteSignedUrlKey" + % client.transport._host, + args[1], + ) -def test_delete_signed_url_key_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_signed_url_key_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -817,22 +890,15 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.BackendBucket( bucket_name="bucket_name_value", - cdn_policy=compute.BackendBucketCdnPolicy( - bypass_cache_on_request_headers=[ - compute.BackendBucketCdnPolicyBypassCacheOnRequestHeader( - header_name="header_name_value" - ) - ] - ), creation_timestamp="creation_timestamp_value", custom_response_headers=["custom_response_headers_value"], description="description_value", @@ -844,9 +910,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.BackendBucket.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendBucket.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -854,13 +920,6 @@ def test_get_rest( # Establish that the response is the type that we expect. assert isinstance(response, compute.BackendBucket) assert response.bucket_name == "bucket_name_value" - assert response.cdn_policy == compute.BackendBucketCdnPolicy( - bypass_cache_on_request_headers=[ - compute.BackendBucketCdnPolicyBypassCacheOnRequestHeader( - header_name="header_name_value" - ) - ] - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.custom_response_headers == ["custom_response_headers_value"] assert response.description == "description_value" @@ -871,12 +930,37 @@ def test_get_rest( assert response.self_link == "self_link_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -884,30 +968,38 @@ def test_get_rest_flattened(): return_value = compute.BackendBucket() # Wrap the value into a proper Response obj - json_return_value = compute.BackendBucket.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendBucket.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_bucket": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_bucket="backend_bucket_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_bucket_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -926,9 +1018,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["backend_bucket_resource"] = compute.BackendBucket( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -938,7 +1033,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -956,14 +1050,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -974,7 +1067,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -992,18 +1084,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["backend_bucket_resource"] = compute.BackendBucket( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1011,35 +1128,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- backend_bucket_resource = compute.BackendBucket(bucket_name="bucket_name_value") - client.insert( - project="project_value", backend_bucket_resource=backend_bucket_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.BackendBucket.to_json( - backend_bucket_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1060,26 +1183,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.BackendBucketList( id="id_value", - items=[compute.BackendBucket(bucket_name="bucket_name_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.BackendBucketList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendBucketList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1087,19 +1208,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.BackendBucket(bucket_name="bucket_name_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListBackendBucketsRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1107,27 +1251,36 @@ def test_list_rest_flattened(): return_value = compute.BackendBucketList() # Wrap the value into a proper Response obj - json_return_value = compute.BackendBucketList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendBucketList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1137,11 +1290,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.BackendBucketList( @@ -1171,16 +1326,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.BackendBucket) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1192,9 +1346,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request_init["backend_bucket_resource"] = compute.BackendBucket( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1204,7 +1361,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1222,14 +1378,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1240,7 +1395,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1258,18 +1412,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchBackendBucketRequest +): + client = 
BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request_init["backend_bucket_resource"] = compute.BackendBucket( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1277,38 +1456,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - backend_bucket_resource = compute.BackendBucket(bucket_name="bucket_name_value") - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_bucket": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_bucket="backend_bucket_value", - backend_bucket_resource=backend_bucket_resource, + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_bucket_value" in http_call[1] + str(body) + str(params) - assert compute.BackendBucket.to_json( - backend_bucket_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1330,9 +1513,12 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request_init["backend_bucket_resource"] = compute.BackendBucket( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1342,7 +1528,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1360,14 +1545,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1378,7 +1562,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1396,18 +1579,43 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateBackendBucketRequest +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_bucket": "sample2"} + request_init["backend_bucket_resource"] = compute.BackendBucket( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1415,38 +1623,42 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - backend_bucket_resource = compute.BackendBucket(bucket_name="bucket_name_value") - client.update( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_bucket": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_bucket="backend_bucket_value", - backend_bucket_resource=backend_bucket_resource, + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_bucket_value" in http_call[1] + str(body) + str(params) - assert compute.BackendBucket.to_json( - backend_bucket_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = BackendBucketsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1544,8 +1756,10 @@ def test_backend_buckets_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_backend_buckets_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1569,29 +1783,6 @@ def test_backend_buckets_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_backend_buckets_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.backend_buckets.transports.BackendBucketsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BackendBucketsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_backend_buckets_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1603,7 +1794,6 @@ def test_backend_buckets_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_backend_buckets_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1619,21 +1809,6 @@ def test_backend_buckets_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_backend_buckets_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BackendBucketsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_backend_buckets_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1780,3 +1955,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_backend_services.py b/tests/unit/gapic/compute_v1/test_backend_services.py index 8c80b106f..35f36615f 100644 --- a/tests/unit/gapic/compute_v1/test_backend_services.py +++ b/tests/unit/gapic/compute_v1/test_backend_services.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.backend_services import BackendServicesClient from google.cloud.compute_v1.services.backend_services import pagers from google.cloud.compute_v1.services.backend_services import transports -from google.cloud.compute_v1.services.backend_services.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_backend_services_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_backend_services_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided 
and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_backend_services_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_backend_services_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -306,7 +294,7 @@ def test_backend_services_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -323,6 +311,7 @@ def test_backend_services_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -347,7 +336,7 @@ def test_backend_services_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -356,6 +345,7 @@ def test_backend_services_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -368,7 +358,7 @@ def test_backend_services_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +367,7 @@ def test_backend_services_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +382,7 @@ def test_backend_services_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -400,6 +391,7 @@ def test_backend_services_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -414,7 +406,7 @@ def test_backend_services_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -423,6 +415,7 @@ def test_backend_services_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -433,9 +426,12 @@ def test_add_signed_url_key_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["signed_url_key_resource"] = compute.SignedUrlKey( + key_name="key_name_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -445,7 +441,6 @@ def test_add_signed_url_key_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -463,14 +458,13 @@ def test_add_signed_url_key_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_signed_url_key(request) @@ -481,7 +475,6 @@ def test_add_signed_url_key_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -499,18 +492,43 @@ def test_add_signed_url_key_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_signed_url_key_rest_bad_request( + transport: str = "rest", request_type=compute.AddSignedUrlKeyBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["signed_url_key_resource"] = compute.SignedUrlKey( + key_name="key_name_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_signed_url_key(request) + + def test_add_signed_url_key_rest_from_dict(): test_add_signed_url_key_rest(request_type=dict) -def test_add_signed_url_key_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_signed_url_key_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -518,38 +536,40 @@ def test_add_signed_url_key_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - signed_url_key_resource = compute.SignedUrlKey(key_name="key_name_value") - client.add_signed_url_key( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", - signed_url_key_resource=signed_url_key_resource, + signed_url_key_resource=compute.SignedUrlKey(key_name="key_name_value"), ) + mock_args.update(sample_request) + client.add_signed_url_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.SignedUrlKey.to_json( - signed_url_key_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_signed_url_key_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/addSignedUrlKey" + % client.transport._host, + args[1], + ) + + +def test_add_signed_url_key_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -569,33 +589,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.BackendServiceAggregatedList( id="id_value", - items={ - "key_value": compute.BackendServicesScopedList( - backend_services=[ - compute.BackendService(affinity_cookie_ttl_sec=2432) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -603,24 +615,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.BackendServicesScopedList( - backend_services=[compute.BackendService(affinity_cookie_ttl_sec=2432)] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListBackendServicesRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -628,27 +659,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.BackendServiceAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/backendServices" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -658,11 +698,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.BackendServiceAggregatedList( @@ -698,10 +740,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.BackendServicesScopedList) assert pager.get("h") is None @@ -719,7 +760,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.BackendServicesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -731,9 +772,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -743,7 +784,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -761,14 +801,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -779,7 +818,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -797,18 +835,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -816,30 +876,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -859,9 +927,9 @@ def test_delete_signed_url_key_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -871,7 +939,6 @@ def test_delete_signed_url_key_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -889,14 +956,13 @@ def test_delete_signed_url_key_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_signed_url_key(request) @@ -907,7 +973,6 @@ def test_delete_signed_url_key_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -925,18 +990,41 @@ def test_delete_signed_url_key_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_signed_url_key_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteSignedUrlKeyBackendServiceRequest, +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_signed_url_key(request) + + def test_delete_signed_url_key_rest_from_dict(): test_delete_signed_url_key_rest(request_type=dict) -def test_delete_signed_url_key_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_signed_url_key_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -944,33 +1032,40 @@ def test_delete_signed_url_key_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_signed_url_key( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", key_name="key_name_value", ) + mock_args.update(sample_request) + client.delete_signed_url_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert "key_name_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/deleteSignedUrlKey" + % client.transport._host, + args[1], + ) -def test_delete_signed_url_key_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_signed_url_key_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -990,74 +1085,42 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.BackendService( affinity_cookie_ttl_sec=2432, - backends=[ - compute.Backend(balancing_mode=compute.Backend.BalancingMode.CONNECTION) - ], - cdn_policy=compute.BackendServiceCdnPolicy( - bypass_cache_on_request_headers=[ - compute.BackendServiceCdnPolicyBypassCacheOnRequestHeader( - header_name="header_name_value" - ) - ] - ), - circuit_breakers=compute.CircuitBreakers(max_connections=1608), - connection_draining=compute.ConnectionDraining(draining_timeout_sec=2124), - consistent_hash=compute.ConsistentHashLoadBalancerSettings( - http_cookie=compute.ConsistentHashLoadBalancerSettingsHttpCookie( - name="name_value" - ) - ), creation_timestamp="creation_timestamp_value", custom_request_headers=["custom_request_headers_value"], custom_response_headers=["custom_response_headers_value"], description="description_value", enable_c_d_n=True, - failover_policy=compute.BackendServiceFailoverPolicy( - disable_connection_drain_on_failover=True - ), fingerprint="fingerprint_value", health_checks=["health_checks_value"], - iap=compute.BackendServiceIAP(enabled=True), id=205, kind="kind_value", load_balancing_scheme=compute.BackendService.LoadBalancingScheme.EXTERNAL, locality_lb_policy=compute.BackendService.LocalityLbPolicy.INVALID_LB_POLICY, - log_config=compute.BackendServiceLogConfig(enable=True), - max_stream_duration=compute.Duration(nanos=543), name="name_value", network="network_value", - outlier_detection=compute.OutlierDetection( - base_ejection_time=compute.Duration(nanos=543) - ), port=453, port_name="port_name_value", protocol=compute.BackendService.Protocol.GRPC, region="region_value", security_policy="security_policy_value", - security_settings=compute.SecuritySettings( - client_tls_policy="client_tls_policy_value" - ), self_link="self_link_value", session_affinity=compute.BackendService.SessionAffinity.CLIENT_IP, - subsetting=compute.Subsetting( - policy=compute.Subsetting.Policy.CONSISTENT_HASH_SUBSETTING - ), timeout_sec=1185, ) # Wrap the value into a proper Response obj - json_return_value = compute.BackendService.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendService.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -1065,36 +1128,13 @@ def test_get_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, compute.BackendService) assert response.affinity_cookie_ttl_sec == 2432 - assert response.backends == [ - compute.Backend(balancing_mode=compute.Backend.BalancingMode.CONNECTION) - ] - assert response.cdn_policy == compute.BackendServiceCdnPolicy( - bypass_cache_on_request_headers=[ - compute.BackendServiceCdnPolicyBypassCacheOnRequestHeader( - header_name="header_name_value" - ) - ] - ) - assert response.circuit_breakers == compute.CircuitBreakers(max_connections=1608) - assert response.connection_draining == compute.ConnectionDraining( - draining_timeout_sec=2124 - ) - assert response.consistent_hash == compute.ConsistentHashLoadBalancerSettings( - http_cookie=compute.ConsistentHashLoadBalancerSettingsHttpCookie( - name="name_value" - ) - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.custom_request_headers == ["custom_request_headers_value"] assert response.custom_response_headers == ["custom_response_headers_value"] assert response.description == "description_value" assert response.enable_c_d_n is True - assert response.failover_policy == compute.BackendServiceFailoverPolicy( - disable_connection_drain_on_failover=True - ) assert response.fingerprint == "fingerprint_value" assert response.health_checks == ["health_checks_value"] - assert response.iap == compute.BackendServiceIAP(enabled=True) assert response.id == 205 assert response.kind == "kind_value" assert ( @@ -1105,35 +1145,49 @@ def test_get_rest( response.locality_lb_policy == compute.BackendService.LocalityLbPolicy.INVALID_LB_POLICY ) - assert response.log_config == compute.BackendServiceLogConfig(enable=True) - assert response.max_stream_duration == compute.Duration(nanos=543) assert response.name == "name_value" assert response.network == "network_value" - assert response.outlier_detection == compute.OutlierDetection( - base_ejection_time=compute.Duration(nanos=543) - ) assert response.port == 453 assert response.port_name == "port_name_value" assert response.protocol == compute.BackendService.Protocol.GRPC assert response.region == "region_value" assert response.security_policy == "security_policy_value" - assert response.security_settings == compute.SecuritySettings( - client_tls_policy="client_tls_policy_value" - ) assert response.self_link == "self_link_value" assert response.session_affinity == compute.BackendService.SessionAffinity.CLIENT_IP - assert response.subsetting == compute.Subsetting( - policy=compute.Subsetting.Policy.CONSISTENT_HASH_SUBSETTING - ) assert response.timeout_sec == 1185 +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1141,30 +1195,38 @@ def test_get_rest_flattened(): return_value = compute.BackendService() # Wrap the value into a proper Response obj - json_return_value = compute.BackendService.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendService.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1183,44 +1245,65 @@ def test_get_health_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["resource_group_reference_resource"] = compute.ResourceGroupReference( + group="group_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.BackendServiceGroupHealth( - annotations={"key_value": "value_value"}, - health_status=[ - compute.HealthStatus(annotations={"key_value": "value_value"}) - ], - kind="kind_value", - ) + return_value = compute.BackendServiceGroupHealth(kind="kind_value",) # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_health(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.BackendServiceGroupHealth) - assert response.annotations == {"key_value": "value_value"} - assert response.health_status == [ - compute.HealthStatus(annotations={"key_value": "value_value"}) - ] assert response.kind == "kind_value" +def test_get_health_rest_bad_request( + transport: str = "rest", request_type=compute.GetHealthBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["resource_group_reference_resource"] = compute.ResourceGroupReference( + group="group_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_health(request) + + def test_get_health_rest_from_dict(): test_get_health_rest(request_type=dict) -def test_get_health_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_health_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1228,40 +1311,42 @@ def test_get_health_rest_flattened(): return_value = compute.BackendServiceGroupHealth() # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- resource_group_reference_resource = compute.ResourceGroupReference( - group="group_value" - ) - client.get_health( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", - resource_group_reference_resource=resource_group_reference_resource, + resource_group_reference_resource=compute.ResourceGroupReference( + group="group_value" + ), ) + mock_args.update(sample_request) + client.get_health(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.ResourceGroupReference.to_json( - resource_group_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_get_health_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/getHealth" + % client.transport._host, + args[1], + ) + + +def test_get_health_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1283,9 +1368,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1295,7 +1383,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1313,14 +1400,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1331,7 +1417,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1349,18 +1434,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1368,35 +1478,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- backend_service_resource = compute.BackendService(affinity_cookie_ttl_sec=2432) - client.insert( - project="project_value", backend_service_resource=backend_service_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.BackendService.to_json( - backend_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1417,26 +1533,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.BackendServiceList( id="id_value", - items=[compute.BackendService(affinity_cookie_ttl_sec=2432)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1444,19 +1558,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.BackendService(affinity_cookie_ttl_sec=2432)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListBackendServicesRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1464,27 +1601,36 @@ def test_list_rest_flattened(): return_value = compute.BackendServiceList() # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1494,11 +1640,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.BackendServiceList( @@ -1528,16 +1676,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.BackendService) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1549,9 +1696,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1561,7 +1711,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1579,14 +1728,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1597,7 +1745,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1615,18 +1762,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchBackendServiceRequest +): + client = 
BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1634,38 +1806,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - backend_service_resource = compute.BackendService(affinity_cookie_ttl_sec=2432) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", - backend_service_resource=backend_service_resource, + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.BackendService.to_json( - backend_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1687,9 +1863,12 @@ def test_set_security_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init[ + "security_policy_reference_resource" + ] = compute.SecurityPolicyReference(security_policy="security_policy_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1699,7 +1878,6 @@ def test_set_security_policy_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1717,14 +1895,13 @@ def test_set_security_policy_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_security_policy(request) @@ -1735,7 +1912,6 @@ def test_set_security_policy_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1753,18 +1929,43 @@ def test_set_security_policy_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_security_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetSecurityPolicyBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init[ + "security_policy_reference_resource" + ] = compute.SecurityPolicyReference(security_policy="security_policy_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_security_policy(request) + + def test_set_security_policy_rest_from_dict(): test_set_security_policy_rest(request_type=dict) -def test_set_security_policy_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_security_policy_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1772,40 +1973,42 @@ def test_set_security_policy_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - security_policy_reference_resource = compute.SecurityPolicyReference( - security_policy="security_policy_value" - ) - client.set_security_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", - security_policy_reference_resource=security_policy_reference_resource, + security_policy_reference_resource=compute.SecurityPolicyReference( + security_policy="security_policy_value" + ), ) + mock_args.update(sample_request) + client.set_security_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.SecurityPolicyReference.to_json( - security_policy_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_security_policy_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}/setSecurityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_security_policy_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1827,9 +2030,12 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1839,7 +2045,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1857,14 +2062,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1875,7 +2079,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1893,18 +2096,43 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateBackendServiceRequest +): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "backend_service": "sample2"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1912,38 +2140,42 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - backend_service_resource = compute.BackendService(affinity_cookie_ttl_sec=2432) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "backend_service": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", backend_service="backend_service_value", - backend_service_resource=backend_service_resource, + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.BackendService.to_json( - backend_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = BackendServicesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2044,8 +2276,10 @@ def test_backend_services_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_backend_services_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2069,29 +2303,6 @@ def test_backend_services_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_backend_services_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.backend_services.transports.BackendServicesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.BackendServicesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_backend_services_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2103,7 +2314,6 @@ def test_backend_services_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_backend_services_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2119,21 +2329,6 @@ def test_backend_services_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_backend_services_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - BackendServicesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_backend_services_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2280,3 +2475,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = BackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_disk_types.py b/tests/unit/gapic/compute_v1/test_disk_types.py index f4277f95a..0fd86559e 100644 --- a/tests/unit/gapic/compute_v1/test_disk_types.py +++ b/tests/unit/gapic/compute_v1/test_disk_types.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.disk_types import DiskTypesClient from google.cloud.compute_v1.services.disk_types import pagers from google.cloud.compute_v1.services.disk_types import transports -from google.cloud.compute_v1.services.disk_types.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -187,7 +171,7 @@ def test_disk_types_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -196,6 +180,7 @@ def test_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -203,7 +188,7 @@ def test_disk_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -212,6 +197,7 @@ def test_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -219,7 +205,7 @@ def 
test_disk_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -228,6 +214,7 @@ def test_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -247,7 +234,7 @@ def test_disk_types_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -256,6 +243,7 @@ def test_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -286,7 +274,7 @@ def test_disk_types_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -303,6 +291,7 @@ def test_disk_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -327,7 +316,7 @@ def test_disk_types_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -336,6 +325,7 @@ def test_disk_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
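For reference, a minimal runnable sketch of the pattern the client_options hunks above apply: the client is now constructed with an explicit transport name, and the test asserts that the transport constructor received always_use_jwt_access=True. This is illustrative only, not part of the patch; it assumes the rest transport class is exported as transports.DiskTypesRestTransport and uses unittest.mock rather than the standalone mock package these test files import.

from unittest import mock

from google.cloud.compute_v1.services.disk_types import DiskTypesClient, transports


def example_always_use_jwt_access(transport_name: str = "rest") -> None:
    # Patch the transport constructor so no credentials or HTTP session are needed.
    with mock.patch.object(transports.DiskTypesRestTransport, "__init__") as patched:
        patched.return_value = None
        DiskTypesClient(transport=transport_name)
        # The client should forward always_use_jwt_access=True to its transport.
        _, kwargs = patched.call_args
        assert kwargs["always_use_jwt_access"] is True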
@@ -348,7 +338,7 @@ def test_disk_types_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +347,7 @@ def test_disk_types_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -371,7 +362,7 @@ def test_disk_types_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -380,6 +371,7 @@ def test_disk_types_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -394,7 +386,7 @@ def test_disk_types_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -403,6 +395,7 @@ def test_disk_types_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -413,33 +406,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.DiskTypeAggregatedList( id="id_value", - items={ - "key_value": compute.DiskTypesScopedList( - disk_types=[ - compute.DiskType(creation_timestamp="creation_timestamp_value") - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.DiskTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskTypeAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -447,24 +432,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.DiskTypesScopedList( - disk_types=[compute.DiskType(creation_timestamp="creation_timestamp_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListDiskTypesRequest +): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -472,27 +476,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.DiskTypeAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.DiskTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskTypeAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/diskTypes" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -502,11 +515,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.DiskTypeAggregatedList( @@ -539,10 +554,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.DiskTypesScopedList) assert pager.get("h") is None @@ -557,7 +571,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.DiskTypesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -567,9 +581,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskTypeReque credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk_type": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -577,7 +591,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskTypeReque return_value = compute.DiskType( creation_timestamp="creation_timestamp_value", default_disk_size_gb=2097, - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", id=205, kind="kind_value", @@ -589,9 +602,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskTypeReque ) # Wrap the value into a proper Response obj - json_return_value = compute.DiskType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskType.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -600,7 +613,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskTypeReque assert isinstance(response, compute.DiskType) assert response.creation_timestamp == "creation_timestamp_value" assert response.default_disk_size_gb == 2097 - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.id == 205 assert response.kind == "kind_value" @@ -611,12 +623,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskTypeReque assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetDiskTypeRequest +): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk_type": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -624,31 +661,42 @@ def test_get_rest_flattened(): return_value = compute.DiskType() # Wrap the value into a proper Response obj - json_return_value = compute.DiskType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskType.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "disk_type": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", disk_type="disk_type_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "disk_type_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/diskTypes/{disk_type}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -666,26 +714,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListDiskTypesRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.DiskTypeList( id="id_value", - items=[compute.DiskType(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.DiskTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskTypeList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -693,21 +739,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListDiskTypesRe # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.DiskType(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListDiskTypesRequest +): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -715,30 +782,36 @@ def test_list_rest_flattened(): return_value = compute.DiskTypeList() # Wrap the value into a proper Response obj - json_return_value = compute.DiskTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskTypeList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/diskTypes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
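For reference, a minimal self-contained sketch of the URI assertion the flattened-call tests above switch to: instead of substring checks against the request body and params, the positional args of the mocked Session.request call are inspected and the request URI is validated against the expected path template. Illustrative only, not part of the patch; it uses unittest.mock and an empty DiskTypeList response for brevity.

from unittest import mock

from requests import Response
from requests.sessions import Session

from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.disk_types import DiskTypesClient
from google.cloud.compute_v1.types import compute


def example_flattened_uri_assertion() -> None:
    client = DiskTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )

    with mock.patch.object(Session, "request") as req:
        # Fake a successful HTTP response carrying an empty DiskTypeList.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.DiskTypeList.to_json(
            compute.DiskTypeList()
        ).encode("UTF-8")
        req.return_value = response_value

        client.list(project="sample1", zone="sample2")

        # Session.request(method, url, ...): args[1] is the URI that was hit.
        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/diskTypes"
            % client.transport._host,
            args[1],
        )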
@@ -748,11 +821,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = DiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.DiskTypeList( @@ -774,16 +849,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.DiskType) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -866,8 +940,10 @@ def test_disk_types_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_disk_types_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -892,30 +968,6 @@ def test_disk_types_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_disk_types_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.disk_types.transports.DiskTypesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DiskTypesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_disk_types_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -927,7 +979,6 @@ def test_disk_types_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_disk_types_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -944,22 +995,6 @@ def test_disk_types_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_disk_types_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DiskTypesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_disk_types_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1106,3 +1141,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = DiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_disks.py b/tests/unit/gapic/compute_v1/test_disks.py index bdd1b5b08..7f13a06dc 100644 --- a/tests/unit/gapic/compute_v1/test_disks.py +++ b/tests/unit/gapic/compute_v1/test_disks.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,31 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.disks import DisksClient from google.cloud.compute_v1.services.disks import pagers from google.cloud.compute_v1.services.disks import transports -from google.cloud.compute_v1.services.disks.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -180,7 +166,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -189,6 +175,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -196,7 +183,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +192,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +200,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +209,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -240,7 +229,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -249,6 +238,7 @@ def test_disks_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -279,7 +269,7 @@ def test_disks_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: 
patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -296,6 +286,7 @@ def test_disks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -320,7 +311,7 @@ def test_disks_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -329,6 +320,7 @@ def test_disks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -341,7 +333,7 @@ def test_disks_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -350,6 +342,7 @@ def test_disks_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -364,7 +357,7 @@ def test_disks_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -373,6 +366,7 @@ def test_disks_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -387,7 +381,7 @@ def test_disks_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -396,6 +390,7 @@ def test_disks_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,9 +401,14 @@ def test_add_resource_policies_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init[ + "disks_add_resource_policies_request_resource" + ] = compute.DisksAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -418,7 +418,6 @@ def test_add_resource_policies_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -436,14 +435,13 @@ def test_add_resource_policies_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_resource_policies(request) @@ -454,7 +452,6 @@ def test_add_resource_policies_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -472,18 +469,45 @@ def test_add_resource_policies_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_resource_policies_rest_bad_request( + transport: str = "rest", request_type=compute.AddResourcePoliciesDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init[ + "disks_add_resource_policies_request_resource" + ] = compute.DisksAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies(request) + + def test_add_resource_policies_rest_from_dict(): test_add_resource_policies_rest(request_type=dict) -def test_add_resource_policies_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_resource_policies_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -491,42 +515,43 @@ def test_add_resource_policies_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - disks_add_resource_policies_request_resource = compute.DisksAddResourcePoliciesRequest( - resource_policies=["resource_policies_value"] - ) - client.add_resource_policies( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", disk="disk_value", - disks_add_resource_policies_request_resource=disks_add_resource_policies_request_resource, + disks_add_resource_policies_request_resource=compute.DisksAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ), ) + mock_args.update(sample_request) + client.add_resource_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.DisksAddResourcePoliciesRequest.to_json( - disks_add_resource_policies_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_resource_policies_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/addResourcePolicies" + % client.transport._host, + args[1], + ) + + +def test_add_resource_policies_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -549,31 +574,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.DiskAggregatedList( id="id_value", - items={ - "key_value": compute.DisksScopedList( - disks=[compute.Disk(creation_timestamp="creation_timestamp_value")] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.DiskAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -581,24 +600,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.DisksScopedList( - disks=[compute.Disk(creation_timestamp="creation_timestamp_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListDisksRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -606,27 +644,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.DiskAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.DiskAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/disks" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -636,11 +683,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.DiskAggregatedList( @@ -670,10 +719,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.DisksScopedList) assert pager.get("h") is None @@ -688,7 +736,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.DisksScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -700,9 +748,10 @@ def test_create_snapshot_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init["snapshot_resource"] = compute.Snapshot(auto_created=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -712,7 +761,6 @@ def test_create_snapshot_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -730,14 +778,13 @@ def test_create_snapshot_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_snapshot(request) @@ -748,7 +795,6 @@ def test_create_snapshot_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -766,18 +812,41 @@ def test_create_snapshot_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_create_snapshot_rest_bad_request( + transport: str = "rest", request_type=compute.CreateSnapshotDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init["snapshot_resource"] = compute.Snapshot(auto_created=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_snapshot(request) + + def test_create_snapshot_rest_from_dict(): test_create_snapshot_rest(request_type=dict) -def test_create_snapshot_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_create_snapshot_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -785,40 +854,41 @@ def test_create_snapshot_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - snapshot_resource = compute.Snapshot(auto_created=True) - client.create_snapshot( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", disk="disk_value", - snapshot_resource=snapshot_resource, + snapshot_resource=compute.Snapshot(auto_created=True), ) + mock_args.update(sample_request) + client.create_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.Snapshot.to_json( - snapshot_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_create_snapshot_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/createSnapshot" + % client.transport._host, + args[1], + ) + + +def test_create_snapshot_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -837,9 +907,9 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteDiskReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -849,7 +919,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteDiskReq creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -867,14 +936,13 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteDiskReq target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -885,7 +953,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteDiskReq assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -903,18 +970,40 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteDiskReq assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -922,31 +1011,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( - project="project_value", zone="zone_value", disk="disk_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value", disk="disk_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -964,9 +1058,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskRequest): credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -974,18 +1068,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskRequest): return_value = compute.Disk( creation_timestamp="creation_timestamp_value", description="description_value", - disk_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), - guest_os_features=[ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ], id=205, kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, last_attach_timestamp="last_attach_timestamp_value", last_detach_timestamp="last_detach_timestamp_value", license_codes=[1360], @@ -1004,14 +1089,8 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskRequest): source_disk="source_disk_value", source_disk_id="source_disk_id_value", source_image="source_image_value", - source_image_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_image_id="source_image_id_value", source_snapshot="source_snapshot_value", - source_snapshot_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_snapshot_id="source_snapshot_id_value", source_storage_object="source_storage_object_value", status=compute.Disk.Status.CREATING, @@ -1021,9 +1100,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskRequest): ) # Wrap the value into a proper Response obj - json_return_value = compute.Disk.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Disk.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -1032,18 +1111,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskRequest): assert isinstance(response, compute.Disk) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.disk_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) - assert response.guest_os_features == [ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ] assert response.id == 205 assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.last_attach_timestamp == "last_attach_timestamp_value" assert response.last_detach_timestamp == "last_detach_timestamp_value" assert response.license_codes == [1360] @@ -1062,14 +1132,8 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetDiskRequest): assert response.source_disk == "source_disk_value" assert response.source_disk_id == "source_disk_id_value" assert response.source_image == "source_image_value" - assert response.source_image_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_image_id == "source_image_id_value" assert response.source_snapshot == "source_snapshot_value" - assert response.source_snapshot_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_snapshot_id == "source_snapshot_id_value" assert response.source_storage_object == "source_storage_object_value" assert response.status == compute.Disk.Status.CREATING @@ -1078,12 +1142,37 @@ def test_get_rest(transport: str = 
"rest", request_type=compute.GetDiskRequest): assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1091,31 +1180,36 @@ def test_get_rest_flattened(): return_value = compute.Disk() # Wrap the value into a proper Response obj - json_return_value = compute.Disk.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Disk.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", zone="zone_value", disk="disk_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value", disk="disk_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1135,60 +1229,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1196,31 +1291,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1238,9 +1344,12 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertDiskReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["disk_resource"] = compute.Disk( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1250,7 +1359,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertDiskReq creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1268,14 +1376,13 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertDiskReq target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1286,7 +1393,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertDiskReq assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1304,18 +1410,43 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertDiskReq assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["disk_resource"] = compute.Disk( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1323,36 +1454,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - disk_resource = compute.Disk(creation_timestamp="creation_timestamp_value") - client.insert( - project="project_value", zone="zone_value", disk_resource=disk_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.Disk.to_json( - disk_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1370,26 +1505,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListDisksReques credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.DiskList( id="id_value", - items=[compute.Disk(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.DiskList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1397,21 +1530,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListDisksReques # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Disk(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListDisksRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1419,30 +1573,36 @@ def test_list_rest_flattened(): return_value = compute.DiskList() # Wrap the value into a proper Response obj - json_return_value = compute.DiskList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1452,11 +1612,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.DiskList( @@ -1478,16 +1640,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Disk) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1499,9 +1660,14 @@ def test_remove_resource_policies_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init[ + "disks_remove_resource_policies_request_resource" + ] = compute.DisksRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
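test_list_rest_pager now seeds the pager with a request that satisfies transcoding and walks both the flattened items and the raw pages. A hedged usage sketch of the same pager surface (placeholder project/zone values; assumes application default credentials rather than the mocked session used in the tests):

from google.cloud import compute_v1

client = compute_v1.DisksClient()
request = {"project": "sample1", "zone": "sample2"}

# Iterating the pager yields compute.Disk items, fetching further pages lazily.
for disk in client.list(request=request):
    print(disk.name)

# .pages exposes the underlying responses; raw_page carries next_page_token,
# which is what the pager test asserts on.
for page in client.list(request=request).pages:
    print(page.raw_page.next_page_token)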
with mock.patch.object(Session, "request") as req: @@ -1511,7 +1677,6 @@ def test_remove_resource_policies_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1529,14 +1694,13 @@ def test_remove_resource_policies_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_resource_policies(request) @@ -1547,7 +1711,6 @@ def test_remove_resource_policies_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1565,18 +1728,45 @@ def test_remove_resource_policies_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_resource_policies_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveResourcePoliciesDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init[ + "disks_remove_resource_policies_request_resource" + ] = compute.DisksRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies(request) + + def test_remove_resource_policies_rest_from_dict(): test_remove_resource_policies_rest(request_type=dict) -def test_remove_resource_policies_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_resource_policies_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1584,42 +1774,43 @@ def test_remove_resource_policies_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - disks_remove_resource_policies_request_resource = compute.DisksRemoveResourcePoliciesRequest( - resource_policies=["resource_policies_value"] - ) - client.remove_resource_policies( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", disk="disk_value", - disks_remove_resource_policies_request_resource=disks_remove_resource_policies_request_resource, + disks_remove_resource_policies_request_resource=compute.DisksRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ), ) + mock_args.update(sample_request) + client.remove_resource_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.DisksRemoveResourcePoliciesRequest.to_json( - disks_remove_resource_policies_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_remove_resource_policies_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/removeResourcePolicies" + % client.transport._host, + args[1], + ) + + +def test_remove_resource_policies_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1640,9 +1831,12 @@ def test_resize_rest(transport: str = "rest", request_type=compute.ResizeDiskReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init["disks_resize_request_resource"] = compute.DisksResizeRequest( + size_gb=739 + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1652,7 +1846,6 @@ def test_resize_rest(transport: str = "rest", request_type=compute.ResizeDiskReq creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1670,14 +1863,13 @@ def test_resize_rest(transport: str = "rest", request_type=compute.ResizeDiskReq target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.resize(request) @@ -1688,7 +1880,6 @@ def test_resize_rest(transport: str = "rest", request_type=compute.ResizeDiskReq assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1706,18 +1897,43 @@ def test_resize_rest(transport: str = "rest", request_type=compute.ResizeDiskReq assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_resize_rest_bad_request( + transport: str = "rest", request_type=compute.ResizeDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init["disks_resize_request_resource"] = compute.DisksResizeRequest( + size_gb=739 + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + def test_resize_rest_from_dict(): test_resize_rest(request_type=dict) -def test_resize_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_resize_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1725,40 +1941,41 @@ def test_resize_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - disks_resize_request_resource = compute.DisksResizeRequest(size_gb=739) - client.resize( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", disk="disk_value", - disks_resize_request_resource=disks_resize_request_resource, + disks_resize_request_resource=compute.DisksResizeRequest(size_gb=739), ) + mock_args.update(sample_request) + client.resize(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.DisksResizeRequest.to_json( - disks_resize_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_resize_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/resize" + % client.transport._host, + args[1], + ) + + +def test_resize_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1779,60 +1996,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
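The *_flattened_error tests pin down the GAPIC calling convention: a method accepts either a full request object or flattened keyword fields, never both. A minimal sketch of that failure mode (placeholder values; assumes application default credentials):

from google.cloud import compute_v1

client = compute_v1.DisksClient()
try:
    client.resize(
        compute_v1.ResizeDiskRequest(),  # request object ...
        project="project_value",         # ... plus a flattened field
    )
except ValueError:
    pass  # mixing the two calling styles is rejected before any HTTP call is made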
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1840,42 +2064,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- zone_set_policy_request_resource = compute.ZoneSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - zone_set_policy_request_resource=zone_set_policy_request_resource, + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.ZoneSetPolicyRequest.to_json( - zone_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1898,9 +2127,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_labels_request_resource"] = compute.ZoneSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1910,7 +2142,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1928,14 +2159,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1946,7 +2176,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1964,18 +2193,43 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_labels_request_resource"] = compute.ZoneSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_labels_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1983,42 +2237,47 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - zone_set_labels_request_resource = compute.ZoneSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - zone_set_labels_request_resource=zone_set_labels_request_resource, + zone_set_labels_request_resource=compute.ZoneSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.ZoneSetLabelsRequest.to_json( - zone_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2041,9 +2300,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2053,9 +2315,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -2065,12 +2327,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsDiskRequest +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2078,42 +2368,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = DisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/disks/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2217,8 +2512,10 @@ def test_disks_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_disks_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2242,29 +2539,6 @@ def test_disks_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_disks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.disks.transports.DisksTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.DisksTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_disks_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2276,7 +2550,6 @@ def test_disks_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_disks_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2292,21 +2565,6 @@ def test_disks_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_disks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - DisksClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_disks_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2449,3 +2707,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py b/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py index e34bb5d77..5e9170fd4 100644 --- a/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py +++ b/tests/unit/gapic/compute_v1/test_external_vpn_gateways.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.external_vpn_gateways import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.external_vpn_gateways import pagers from google.cloud.compute_v1.services.external_vpn_gateways import transports -from google.cloud.compute_v1.services.external_vpn_gateways.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
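The new test_transport_close and test_client_ctx cover context-manager support: leaving the with-block calls transport.close(), which for the REST transport closes its underlying requests Session. A short usage sketch (placeholder values; assumes application default credentials):

from google.cloud import compute_v1

with compute_v1.DisksClient() as client:
    client.list(request={"project": "sample1", "zone": "sample2"})
# On exit the client closes its transport session; the instance should not be reused.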
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -198,7 +182,7 @@ def test_external_vpn_gateways_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -207,6 +191,7 @@ def test_external_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,7 +199,7 @@ def test_external_vpn_gateways_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -223,6 +208,7 @@ def test_external_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -230,7 +216,7 @@ def test_external_vpn_gateways_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -239,6 +225,7 @@ def test_external_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -258,7 +245,7 @@ def test_external_vpn_gateways_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -267,6 +254,7 @@ def test_external_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -309,7 +297,7 @@ def test_external_vpn_gateways_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": 
expected_client_cert_source = None @@ -326,6 +314,7 @@ def test_external_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -350,7 +339,7 @@ def test_external_vpn_gateways_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +348,7 @@ def test_external_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -371,7 +361,7 @@ def test_external_vpn_gateways_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -380,6 +370,7 @@ def test_external_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -394,7 +385,7 @@ def test_external_vpn_gateways_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -403,6 +394,7 @@ def test_external_vpn_gateways_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -417,7 +409,7 @@ def test_external_vpn_gateways_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -426,6 +418,7 @@ def test_external_vpn_gateways_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -436,9 +429,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "external_vpn_gateway": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
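The `request = request_type(request_init)` calls introduced above rely on proto-plus messages accepting a mapping of field values as their first positional argument. A small usage sketch, reusing the field names from the delete request above:

from google.cloud.compute_v1.types import compute

# Build a request from a dict, the same way the transcoding-friendly
# request_init dicts are used in these tests.
request_init = {"project": "sample1", "external_vpn_gateway": "sample2"}
request = compute.DeleteExternalVpnGatewayRequest(request_init)

assert request.project == "sample1"
assert request.external_vpn_gateway == "sample2"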
with mock.patch.object(Session, "request") as req: @@ -448,7 +441,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -466,14 +458,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -484,7 +475,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -502,19 +492,39 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteExternalVpnGatewayRequest +): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "external_vpn_gateway": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -523,31 +533,37 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "external_vpn_gateway": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", external_vpn_gateway="external_vpn_gateway_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "external_vpn_gateway_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -567,9 +583,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "external_vpn_gateway": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
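The flattened-call assertions above replace substring checks with path_template.validate, which matches a concrete request URL against the method's URI template. A minimal sketch of that check, assuming an illustrative host rather than client.transport._host:

from google.api_core import path_template

template = (
    "https://compute.googleapis.com/compute/v1/projects/{project}"
    "/global/externalVpnGateways/{external_vpn_gateway}"
)
url = (
    "https://compute.googleapis.com/compute/v1/projects/sample1"
    "/global/externalVpnGateways/sample2"
)

# Each {variable} in the template matches one path segment of the URL.
assert path_template.validate(template, url)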
with mock.patch.object(Session, "request") as req: @@ -578,19 +594,17 @@ def test_get_rest( creation_timestamp="creation_timestamp_value", description="description_value", id=205, - interfaces=[compute.ExternalVpnGatewayInterface(id=205)], kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, name="name_value", redundancy_type=compute.ExternalVpnGateway.RedundancyType.FOUR_IPS_REDUNDANCY, self_link="self_link_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.ExternalVpnGateway.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ExternalVpnGateway.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -600,10 +614,8 @@ def test_get_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.id == 205 - assert response.interfaces == [compute.ExternalVpnGatewayInterface(id=205)] assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.name == "name_value" assert ( response.redundancy_type @@ -612,13 +624,36 @@ def test_get_rest( assert response.self_link == "self_link_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetExternalVpnGatewayRequest +): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "external_vpn_gateway": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -627,31 +662,37 @@ def test_get_rest_flattened(): return_value = compute.ExternalVpnGateway() # Wrap the value into a proper Response obj - json_return_value = compute.ExternalVpnGateway.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ExternalVpnGateway.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "external_vpn_gateway": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", external_vpn_gateway="external_vpn_gateway_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "external_vpn_gateway_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/externalVpnGateways/{external_vpn_gateway}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -671,9 +712,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["external_vpn_gateway_resource"] = compute.ExternalVpnGateway( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -683,7 +727,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -701,14 +744,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -719,7 +761,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -737,19 +778,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertExternalVpnGatewayRequest +): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["external_vpn_gateway_resource"] = compute.ExternalVpnGateway( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -758,39 +822,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- external_vpn_gateway_resource = compute.ExternalVpnGateway( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - external_vpn_gateway_resource=external_vpn_gateway_resource, + external_vpn_gateway_resource=compute.ExternalVpnGateway( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.ExternalVpnGateway.to_json( - external_vpn_gateway_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/externalVpnGateways" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -812,9 +877,9 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -822,21 +887,15 @@ def test_list_rest( return_value = compute.ExternalVpnGatewayList( etag="etag_value", id="id_value", - items=[ - compute.ExternalVpnGateway( - creation_timestamp="creation_timestamp_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ExternalVpnGatewayList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ExternalVpnGatewayList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -845,22 +904,41 @@ def test_list_rest( assert isinstance(response, pagers.ListPager) assert response.etag == "etag_value" assert response.id == "id_value" - assert response.items == [ - compute.ExternalVpnGateway(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListExternalVpnGatewaysRequest +): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -869,28 +947,35 @@ def test_list_rest_flattened(): return_value = compute.ExternalVpnGatewayList() # Wrap the value into a proper Response obj - json_return_value = compute.ExternalVpnGatewayList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ExternalVpnGatewayList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/externalVpnGateways" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -901,13 +986,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ExternalVpnGatewaysClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ExternalVpnGatewayList( @@ -937,16 +1024,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ExternalVpnGateway) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -958,9 +1044,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
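The rewritten pager test above walks a series of pages keyed by next_page_token until the token is empty. A minimal sketch of that paging loop, using a plain fetch callable in place of the generated client and pager:

from typing import Callable, Dict, Iterator, List


def iterate_pages(fetch: Callable[[str], Dict]) -> Iterator[Dict]:
    # Fetch pages until a page comes back with an empty next_page_token.
    token = ""
    while True:
        page = fetch(token)
        yield page
        token = page.get("next_page_token", "")
        if not token:
            break


# Usage with a fake fetch function returning three pages of items.
pages_data = {
    "": {"items": [1, 2], "next_page_token": "abc"},
    "abc": {"items": [3], "next_page_token": "def"},
    "def": {"items": [4, 5, 6], "next_page_token": ""},
}
items: List[int] = []
for page in iterate_pages(lambda t: pages_data[t]):
    items.extend(page["items"])
assert len(items) == 6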
with mock.patch.object(Session, "request") as req: @@ -970,7 +1059,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -988,14 +1076,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1006,7 +1093,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1024,19 +1110,42 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsExternalVpnGatewayRequest +): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): +def test_set_labels_rest_flattened(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1045,41 +1154,41 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- global_set_labels_request_resource = compute.GlobalSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_labels_request_resource=global_set_labels_request_resource, + global_set_labels_request_resource=compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetLabelsRequest.to_json( - global_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1103,9 +1212,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1115,9 +1227,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1127,13 +1239,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsExternalVpnGatewayRequest, +): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1142,41 +1281,41 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/externalVpnGateways/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): client = ExternalVpnGatewaysClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1275,8 +1414,10 @@ def test_external_vpn_gateways_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_external_vpn_gateways_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1300,29 +1441,6 @@ def test_external_vpn_gateways_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_external_vpn_gateways_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.external_vpn_gateways.transports.ExternalVpnGatewaysTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ExternalVpnGatewaysTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_external_vpn_gateways_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1334,7 +1452,6 @@ def test_external_vpn_gateways_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_external_vpn_gateways_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1350,21 +1467,6 @@ def test_external_vpn_gateways_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_external_vpn_gateways_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ExternalVpnGatewaysClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_external_vpn_gateways_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1511,3 +1613,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ExternalVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_firewall_policies.py b/tests/unit/gapic/compute_v1/test_firewall_policies.py index afe59eb65..3ae275970 100644 --- a/tests/unit/gapic/compute_v1/test_firewall_policies.py +++ b/tests/unit/gapic/compute_v1/test_firewall_policies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.firewall_policies import FirewallPoliciesClient from google.cloud.compute_v1.services.firewall_policies import pagers from google.cloud.compute_v1.services.firewall_policies import transports -from google.cloud.compute_v1.services.firewall_policies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_firewall_policies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_firewall_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_firewall_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_firewall_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_firewall_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_firewall_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_firewall_policies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_firewall_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_firewall_policies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 
@@ def test_firewall_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_firewall_policies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_firewall_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -369,7 +359,7 @@ def test_firewall_policies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_firewall_policies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_firewall_policies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_firewall_policies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_firewall_policies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_firewall_policies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,12 @@ def test_add_association_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init[ + "firewall_policy_association_resource" + ] = compute.FirewallPolicyAssociation(attachment_target="attachment_target_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -446,7 +442,6 @@ def test_add_association_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -464,14 +459,13 @@ def test_add_association_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_association(request) @@ -482,7 +476,6 @@ def test_add_association_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -500,18 +493,43 @@ def test_add_association_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_association_rest_bad_request( + transport: str = "rest", request_type=compute.AddAssociationFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init[ + "firewall_policy_association_resource" + ] = compute.FirewallPolicyAssociation(attachment_target="attachment_target_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_association(request) + + def test_add_association_rest_from_dict(): test_add_association_rest(request_type=dict) -def test_add_association_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_association_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -519,38 +537,41 @@ def test_add_association_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - firewall_policy_association_resource = compute.FirewallPolicyAssociation( - attachment_target="attachment_target_value" - ) - client.add_association( + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( firewall_policy="firewall_policy_value", - firewall_policy_association_resource=firewall_policy_association_resource, + firewall_policy_association_resource=compute.FirewallPolicyAssociation( + attachment_target="attachment_target_value" + ), ) + mock_args.update(sample_request) + client.add_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) - assert compute.FirewallPolicyAssociation.to_json( - firewall_policy_association_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_association_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addAssociation" + % client.transport._host, + args[1], + ) + + +def test_add_association_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -571,9 +592,12 @@ def test_add_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init["firewall_policy_rule_resource"] = compute.FirewallPolicyRule( + action="action_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -583,7 +607,6 @@ def test_add_rule_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -601,14 +624,13 @@ def test_add_rule_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_rule(request) @@ -619,7 +641,6 @@ def test_add_rule_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -637,18 +658,43 @@ def test_add_rule_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_rule_rest_bad_request( + transport: str = "rest", request_type=compute.AddRuleFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init["firewall_policy_rule_resource"] = compute.FirewallPolicyRule( + action="action_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule(request) + + def test_add_rule_rest_from_dict(): test_add_rule_rest(request_type=dict) -def test_add_rule_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_rule_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -656,38 +702,41 @@ def test_add_rule_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- firewall_policy_rule_resource = compute.FirewallPolicyRule( - action="action_value" - ) - client.add_rule( + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( firewall_policy="firewall_policy_value", - firewall_policy_rule_resource=firewall_policy_rule_resource, + firewall_policy_rule_resource=compute.FirewallPolicyRule( + action="action_value" + ), ) + mock_args.update(sample_request) + client.add_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) - assert compute.FirewallPolicyRule.to_json( - firewall_policy_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_rule_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/addRule" + % client.transport._host, + args[1], + ) + + +def test_add_rule_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -708,9 +757,9 @@ def test_clone_rules_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -720,7 +769,6 @@ def test_clone_rules_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -738,14 +786,13 @@ def test_clone_rules_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.clone_rules(request) @@ -756,7 +803,6 @@ def test_clone_rules_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -774,18 +820,40 @@ def test_clone_rules_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_clone_rules_rest_bad_request( + transport: str = "rest", request_type=compute.CloneRulesFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.clone_rules(request) + + def test_clone_rules_rest_from_dict(): test_clone_rules_rest(request_type=dict) -def test_clone_rules_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_clone_rules_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -793,27 +861,36 @@ def test_clone_rules_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.clone_rules(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.clone_rules(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/cloneRules" + % client.transport._host, + args[1], + ) -def test_clone_rules_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_clone_rules_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -831,9 +908,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -843,7 +920,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -861,14 +937,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -879,7 +954,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -897,18 +971,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteFirewallPolicyRequest +): + client = FirewallPoliciesClient( 
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -916,27 +1012,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -954,19 +1059,14 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.FirewallPolicy( - associations=[ - compute.FirewallPolicyAssociation( - attachment_target="attachment_target_value" - ) - ], creation_timestamp="creation_timestamp_value", description="description_value", display_name="display_name_value", @@ -976,25 +1076,21 @@ def test_get_rest( name="name_value", parent="parent_value", rule_tuple_count=1737, - rules=[compute.FirewallPolicyRule(action="action_value")], self_link="self_link_value", self_link_with_id="self_link_with_id_value", short_name="short_name_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallPolicy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.FirewallPolicy) - assert response.associations == [ - compute.FirewallPolicyAssociation(attachment_target="attachment_target_value") - ] assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.display_name == "display_name_value" @@ -1004,18 +1100,42 @@ def test_get_rest( assert response.name == "name_value" assert response.parent == "parent_value" assert response.rule_tuple_count == 1737 - assert response.rules == [compute.FirewallPolicyRule(action="action_value")] assert response.self_link == "self_link_value" assert response.self_link_with_id == "self_link_with_id_value" assert response.short_name == "short_name_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1023,27 +1143,36 @@ def test_get_rest_flattened(): return_value = compute.FirewallPolicy() # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallPolicy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1060,9 +1189,9 @@ def test_get_association_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1076,9 +1205,9 @@ def test_get_association_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicyAssociation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallPolicyAssociation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_association(request) @@ -1092,12 +1221,37 @@ def test_get_association_rest( assert response.short_name == "short_name_value" +def test_get_association_rest_bad_request( + transport: str = "rest", request_type=compute.GetAssociationFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_association(request) + + def test_get_association_rest_from_dict(): test_get_association_rest(request_type=dict) -def test_get_association_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_association_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1105,27 +1259,36 @@ def test_get_association_rest_flattened(): return_value = compute.FirewallPolicyAssociation() # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicyAssociation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallPolicyAssociation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_association(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.get_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getAssociation" + % client.transport._host, + args[1], + ) -def test_get_association_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_association_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1143,60 +1306,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1204,27 +1368,36 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy(resource="resource_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"resource": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(resource="resource_value",) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1241,9 +1414,9 @@ def test_get_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1255,9 +1428,6 @@ def test_get_rule_rest( disabled=True, enable_logging=True, kind="kind_value", - match=compute.FirewallPolicyRuleMatcher( - dest_ip_ranges=["dest_ip_ranges_value"] - ), priority=898, rule_tuple_count=1737, target_resources=["target_resources_value"], @@ -1265,9 +1435,9 @@ def test_get_rule_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicyRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallPolicyRule.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_rule(request) @@ -1280,21 +1450,43 @@ def test_get_rule_rest( assert response.disabled is True assert response.enable_logging is True assert response.kind == "kind_value" - assert response.match == compute.FirewallPolicyRuleMatcher( - dest_ip_ranges=["dest_ip_ranges_value"] - ) assert response.priority == 898 assert response.rule_tuple_count == 1737 assert response.target_resources == ["target_resources_value"] assert response.target_service_accounts == ["target_service_accounts_value"] +def test_get_rule_rest_bad_request( + transport: str = "rest", request_type=compute.GetRuleFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule(request) + + def test_get_rule_rest_from_dict(): test_get_rule_rest(request_type=dict) -def test_get_rule_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rule_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1302,27 +1494,36 @@ def test_get_rule_rest_flattened(): return_value = compute.FirewallPolicyRule() # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicyRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallPolicyRule.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_rule(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.get_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/getRule" + % client.transport._host, + args[1], + ) -def test_get_rule_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rule_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1340,9 +1541,16 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {} + request_init["firewall_policy_resource"] = compute.FirewallPolicy( + associations=[ + compute.FirewallPolicyAssociation( + attachment_target="attachment_target_value" + ) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1352,7 +1560,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1370,14 +1577,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1388,7 +1594,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1406,18 +1611,47 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {} + request_init["firewall_policy_resource"] = compute.FirewallPolicy( + associations=[ + compute.FirewallPolicyAssociation( + attachment_target="attachment_target_value" + ) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1425,38 +1659,44 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - firewall_policy_resource = compute.FirewallPolicy( - associations=[ - compute.FirewallPolicyAssociation( - attachment_target="attachment_target_value" - ) - ] + # get arguments that satisfy an http rule for this method + sample_request = {} + + # get truthy value for each flattened field + mock_args = dict( + firewall_policy_resource=compute.FirewallPolicy( + associations=[ + compute.FirewallPolicyAssociation( + attachment_target="attachment_target_value" + ) + ] + ), ) - client.insert(firewall_policy_resource=firewall_policy_resource,) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert compute.FirewallPolicy.to_json( - firewall_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies" + % client.transport._host, + args[1], + ) -def test_insert_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1480,33 +1720,21 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.FirewallPolicyList( - id="id_value", - items=[ - compute.FirewallPolicy( - associations=[ - compute.FirewallPolicyAssociation( - attachment_target="attachment_target_value" - ) - ] - ) - ], - kind="kind_value", - next_page_token="next_page_token_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), + id="id_value", kind="kind_value", next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallPolicyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1514,65 +1742,44 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.FirewallPolicy( - associations=[ - compute.FirewallPolicyAssociation( - attachment_target="attachment_target_value" - ) - ] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) - -def test_list_rest_from_dict(): - test_list_rest(request_type=dict) +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListFirewallPoliciesRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) -def test_list_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.FirewallPolicyList() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPolicyList.to_json(return_value) response_value = Response() - response_value.status_code = 200 - response_value._content = json_return_value.encode("UTF-8") + response_value.status_code = 400 + response_value.request = Request() req.return_value = response_value + client.list(request) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - - -def test_list_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list(compute.ListFirewallPoliciesRequest(),) +def test_list_rest_from_dict(): + test_list_rest(request_type=dict) -def test_list_pager(): +def test_list_rest_pager(): client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.FirewallPolicyList( @@ -1602,16 +1809,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.FirewallPolicy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1623,80 +1829,57 @@ def test_list_associations_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.FirewallPoliciesListAssociationsResponse( - associations=[ - compute.FirewallPolicyAssociation( - attachment_target="attachment_target_value" - ) - ], kind="kind_value", ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.FirewallPoliciesListAssociationsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_associations(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.FirewallPoliciesListAssociationsResponse) - assert response.associations == [ - compute.FirewallPolicyAssociation(attachment_target="attachment_target_value") - ] assert response.kind == "kind_value" -def test_list_associations_rest_from_dict(): - test_list_associations_rest(request_type=dict) - - -def test_list_associations_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_associations_rest_bad_request( + transport: str = "rest", request_type=compute.ListAssociationsFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = compute.FirewallPoliciesListAssociationsResponse() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj - json_return_value = compute.FirewallPoliciesListAssociationsResponse.to_json( - return_value - ) response_value = Response() - response_value.status_code = 200 - response_value._content = json_return_value.encode("UTF-8") + response_value.status_code = 400 + response_value.request = Request() req.return_value = response_value + client.list_associations(request) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_associations() - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - -def test_list_associations_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_associations(compute.ListAssociationsFirewallPolicyRequest(),) +def test_list_associations_rest_from_dict(): + test_list_associations_rest(request_type=dict) def test_move_rest( @@ -1706,9 +1889,9 @@ def test_move_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1718,7 +1901,6 @@ def test_move_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1736,14 +1918,13 @@ def test_move_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.move(request) @@ -1754,7 +1935,6 @@ def test_move_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1772,18 +1952,40 @@ def test_move_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_move_rest_bad_request( + transport: str = "rest", request_type=compute.MoveFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move(request) + + def test_move_rest_from_dict(): test_move_rest(request_type=dict) -def test_move_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_move_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1791,27 +1993,36 @@ def test_move_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.move(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.move(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/move" + % client.transport._host, + args[1], + ) -def test_move_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_move_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1829,9 +2040,16 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init["firewall_policy_resource"] = compute.FirewallPolicy( + associations=[ + compute.FirewallPolicyAssociation( + attachment_target="attachment_target_value" + ) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1841,7 +2059,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1859,14 +2076,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1877,7 +2093,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1895,18 +2110,47 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init["firewall_policy_resource"] = compute.FirewallPolicy( + associations=[ + compute.FirewallPolicyAssociation( + attachment_target="attachment_target_value" + ) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1914,42 +2158,45 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - firewall_policy_resource = compute.FirewallPolicy( - associations=[ - compute.FirewallPolicyAssociation( - attachment_target="attachment_target_value" - ) - ] - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( firewall_policy="firewall_policy_value", - firewall_policy_resource=firewall_policy_resource, + firewall_policy_resource=compute.FirewallPolicy( + associations=[ + compute.FirewallPolicyAssociation( + attachment_target="attachment_target_value" + ) + ] + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) - assert compute.FirewallPolicy.to_json( - firewall_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1974,9 +2221,12 @@ def test_patch_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init["firewall_policy_rule_resource"] = compute.FirewallPolicyRule( + action="action_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1986,7 +2236,6 @@ def test_patch_rule_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2004,14 +2253,13 @@ def test_patch_rule_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch_rule(request) @@ -2022,7 +2270,6 @@ def test_patch_rule_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2040,18 +2287,43 @@ def test_patch_rule_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rule_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRuleFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request_init["firewall_policy_rule_resource"] = compute.FirewallPolicyRule( + action="action_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule(request) + + def test_patch_rule_rest_from_dict(): test_patch_rule_rest(request_type=dict) -def test_patch_rule_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rule_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2059,38 +2331,41 @@ def test_patch_rule_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - firewall_policy_rule_resource = compute.FirewallPolicyRule( - action="action_value" - ) - client.patch_rule( + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( firewall_policy="firewall_policy_value", - firewall_policy_rule_resource=firewall_policy_rule_resource, + firewall_policy_rule_resource=compute.FirewallPolicyRule( + action="action_value" + ), ) + mock_args.update(sample_request) + client.patch_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) - assert compute.FirewallPolicyRule.to_json( - firewall_policy_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rule_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/patchRule" + % client.transport._host, + args[1], + ) + + +def test_patch_rule_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2111,9 +2386,9 @@ def test_remove_association_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2123,7 +2398,6 @@ def test_remove_association_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2141,14 +2415,13 @@ def test_remove_association_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_association(request) @@ -2159,7 +2432,6 @@ def test_remove_association_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2177,18 +2449,40 @@ def test_remove_association_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_association_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveAssociationFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_association(request) + + def test_remove_association_rest_from_dict(): test_remove_association_rest(request_type=dict) -def test_remove_association_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_association_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2196,27 +2490,36 @@ def test_remove_association_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.remove_association(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.remove_association(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeAssociation" + % client.transport._host, + args[1], + ) -def test_remove_association_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_association_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2234,9 +2537,9 @@ def test_remove_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2246,7 +2549,6 @@ def test_remove_rule_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2264,14 +2566,13 @@ def test_remove_rule_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_rule(request) @@ -2282,7 +2583,6 @@ def test_remove_rule_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2300,18 +2600,40 @@ def test_remove_rule_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_rule_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveRuleFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"firewall_policy": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule(request) + + def test_remove_rule_rest_from_dict(): test_remove_rule_rest(request_type=dict) -def test_remove_rule_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_rule_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2319,27 +2641,36 @@ def test_remove_rule_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.remove_rule(firewall_policy="firewall_policy_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"firewall_policy": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(firewall_policy="firewall_policy_value",) + mock_args.update(sample_request) + client.remove_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "firewall_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{firewall_policy}/removeRule" + % client.transport._host, + args[1], + ) -def test_remove_rule_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_rule_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2357,60 +2688,71 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request_init[ + "global_organization_set_policy_request_resource" + ] = compute.GlobalOrganizationSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyFirewallPolicyRequest +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request_init[ + "global_organization_set_policy_request_resource" + ] = compute.GlobalOrganizationSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2418,38 +2760,41 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_organization_set_policy_request_resource = compute.GlobalOrganizationSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"resource": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( resource="resource_value", - global_organization_set_policy_request_resource=global_organization_set_policy_request_resource, + global_organization_set_policy_request_resource=compute.GlobalOrganizationSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalOrganizationSetPolicyRequest.to_json( - global_organization_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2471,9 +2816,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2483,9 +2831,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -2495,12 +2843,41 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsFirewallPolicyRequest, +): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"resource": "sample1"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2508,38 +2885,41 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = {"resource": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = FirewallPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/firewallPolicies/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2646,8 +3026,10 @@ def test_firewall_policies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_firewall_policies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2671,29 +3053,6 @@ def test_firewall_policies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_firewall_policies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.firewall_policies.transports.FirewallPoliciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirewallPoliciesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_firewall_policies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2705,7 +3064,6 @@ def test_firewall_policies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_firewall_policies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2721,21 +3079,6 @@ def test_firewall_policies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_firewall_policies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - FirewallPoliciesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_firewall_policies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2882,3 +3225,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = FirewallPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_firewalls.py b/tests/unit/gapic/compute_v1/test_firewalls.py index 6bfd4fd66..79504d961 100644 --- a/tests/unit/gapic/compute_v1/test_firewalls.py +++ b/tests/unit/gapic/compute_v1/test_firewalls.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.firewalls import FirewallsClient from google.cloud.compute_v1.services.firewalls import pagers from google.cloud.compute_v1.services.firewalls import transports -from google.cloud.compute_v1.services.firewalls.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,7 +169,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,6 +178,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -201,7 +186,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -210,6 +195,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -217,7 +203,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -226,6 +212,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,7 +232,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,6 +241,7 @@ def test_firewalls_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -284,7 +272,7 @@ def test_firewalls_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -301,6 +289,7 @@ def test_firewalls_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -325,7 +314,7 @@ def test_firewalls_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -334,6 +323,7 @@ def test_firewalls_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -346,7 +336,7 @@ def test_firewalls_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +345,7 @@ def test_firewalls_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -369,7 +360,7 @@ def test_firewalls_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +369,7 @@ def test_firewalls_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +384,7 @@ def test_firewalls_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -401,6 +393,7 @@ def test_firewalls_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -411,9 +404,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -423,7 +416,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -441,14 +433,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -459,7 +450,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -477,18 +467,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteFirewallRequest +): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -496,30 +508,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", firewall="firewall_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "firewall": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", firewall="firewall_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "firewall_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/firewalls/{firewall}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -536,24 +554,21 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetFirewallReque credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Firewall( - allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")], creation_timestamp="creation_timestamp_value", - denied=[compute.Denied(I_p_protocol="I_p_protocol_value")], description="description_value", destination_ranges=["destination_ranges_value"], direction=compute.Firewall.Direction.EGRESS, disabled=True, id=205, kind="kind_value", - log_config=compute.FirewallLogConfig(enable=True), name="name_value", network="network_value", priority=898, @@ -566,25 +581,22 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetFirewallReque ) # Wrap the value into a proper Response obj - json_return_value = compute.Firewall.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Firewall.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Firewall) - assert response.allowed == [compute.Allowed(I_p_protocol="I_p_protocol_value")] assert response.creation_timestamp == "creation_timestamp_value" - assert response.denied == [compute.Denied(I_p_protocol="I_p_protocol_value")] assert response.description == "description_value" assert response.destination_ranges == ["destination_ranges_value"] assert response.direction == compute.Firewall.Direction.EGRESS assert response.disabled is True assert response.id == 205 assert response.kind == "kind_value" - assert response.log_config == compute.FirewallLogConfig(enable=True) assert response.name == "name_value" assert response.network == "network_value" assert response.priority == 898 @@ -596,12 +608,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetFirewallReque assert response.target_tags == ["target_tags_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetFirewallRequest +): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -609,30 +646,36 @@ def test_get_rest_flattened(): return_value = compute.Firewall() # Wrap the value into a proper Response obj - json_return_value = compute.Firewall.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Firewall.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", firewall="firewall_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "firewall": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", firewall="firewall_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "firewall_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/firewalls/{firewall}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -651,9 +694,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["firewall_resource"] = compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -663,7 +709,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -681,14 +726,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -699,7 +743,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -717,18 +760,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertFirewallRequest +): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["firewall_resource"] = compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ) + request = 
request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -736,37 +804,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - firewall_resource = compute.Firewall( - allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] - ) - client.insert( - project="project_value", firewall_resource=firewall_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + firewall_resource=compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.Firewall.to_json( - firewall_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/firewalls" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -785,30 +857,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListFirewallsRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.FirewallList( id="id_value", - items=[ - compute.Firewall( - allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.FirewallList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -816,21 +882,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListFirewallsRe # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Firewall(allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")]) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListFirewallsRequest +): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -838,27 +925,36 @@ def test_list_rest_flattened(): return_value = compute.FirewallList() # Wrap the value into a proper Response obj - json_return_value = compute.FirewallList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.FirewallList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/firewalls" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -868,11 +964,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.FirewallList( @@ -894,16 +992,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Firewall) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -913,9 +1010,12 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchFirewallR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request_init["firewall_resource"] = compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -925,7 +1025,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchFirewallR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -943,14 +1042,13 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchFirewallR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -961,7 +1059,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchFirewallR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -979,18 +1076,43 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchFirewallR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchFirewallRequest +): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request_init["firewall_resource"] = compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -998,40 +1120,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - firewall_resource = compute.Firewall( - allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "firewall": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", firewall="firewall_value", - firewall_resource=firewall_resource, + firewall_resource=compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "firewall_value" in http_call[1] + str(body) + str(params) - assert compute.Firewall.to_json( - firewall_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/firewalls/{firewall}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1053,9 +1177,12 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request_init["firewall_resource"] = compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1065,7 +1192,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1083,14 +1209,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1101,7 +1226,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1119,18 +1243,43 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateFirewallRequest +): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "firewall": "sample2"} + request_init["firewall_resource"] = compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1138,40 +1287,42 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- firewall_resource = compute.Firewall( - allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] - ) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "firewall": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", firewall="firewall_value", - firewall_resource=firewall_resource, + firewall_resource=compute.Firewall( + allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "firewall_value" in http_call[1] + str(body) + str(params) - assert compute.Firewall.to_json( - firewall_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = FirewallsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/firewalls/{firewall}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1267,8 +1418,10 @@ def test_firewalls_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_firewalls_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1292,29 +1445,6 @@ def test_firewalls_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_firewalls_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.firewalls.transports.FirewallsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.FirewallsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_firewalls_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1326,7 +1456,6 @@ def test_firewalls_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_firewalls_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1342,21 +1471,6 @@ def test_firewalls_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_firewalls_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - FirewallsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_firewalls_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1503,3 +1617,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = FirewallsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_forwarding_rules.py b/tests/unit/gapic/compute_v1/test_forwarding_rules.py index 8480cc362..a018dd7be 100644 --- a/tests/unit/gapic/compute_v1/test_forwarding_rules.py +++ b/tests/unit/gapic/compute_v1/test_forwarding_rules.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.forwarding_rules import ForwardingRulesClient from google.cloud.compute_v1.services.forwarding_rules import pagers from google.cloud.compute_v1.services.forwarding_rules import transports -from google.cloud.compute_v1.services.forwarding_rules.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_forwarding_rules_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_forwarding_rules_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_forwarding_rules_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_forwarding_rules_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -306,7 +294,7 @@ def test_forwarding_rules_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -323,6 +311,7 @@ def 
test_forwarding_rules_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -347,7 +336,7 @@ def test_forwarding_rules_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -356,6 +345,7 @@ def test_forwarding_rules_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -368,7 +358,7 @@ def test_forwarding_rules_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +367,7 @@ def test_forwarding_rules_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +382,7 @@ def test_forwarding_rules_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -400,6 +391,7 @@ def test_forwarding_rules_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -414,7 +406,7 @@ def test_forwarding_rules_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -423,6 +415,7 @@ def test_forwarding_rules_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -433,33 +426,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.ForwardingRuleAggregatedList( id="id_value", - items={ - "key_value": compute.ForwardingRulesScopedList( - forwarding_rules=[ - compute.ForwardingRule(I_p_address="I_p_address_value") - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRuleAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRuleAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -467,24 +452,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.ForwardingRulesScopedList( - forwarding_rules=[compute.ForwardingRule(I_p_address="I_p_address_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListForwardingRulesRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -492,27 +496,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.ForwardingRuleAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRuleAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRuleAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/forwardingRules" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -522,11 +535,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ForwardingRuleAggregatedList( @@ -562,10 +577,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.ForwardingRulesScopedList) assert pager.get("h") is None @@ -583,7 +597,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.ForwardingRulesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -595,9 +609,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -607,7 +625,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -625,14 +642,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -643,7 +659,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -661,18 +676,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteForwardingRuleRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -680,33 +721,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", forwarding_rule="forwarding_rule_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -726,9 +778,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -747,13 +803,7 @@ def test_get_rest( is_mirroring_collector=True, kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, load_balancing_scheme=compute.ForwardingRule.LoadBalancingScheme.EXTERNAL, - metadata_filters=[ - compute.MetadataFilter( - filter_labels=[compute.MetadataFilterLabelMatch(name="name_value")] - ) - ], name="name_value", network="network_value", network_tier=compute.ForwardingRule.NetworkTier.PREMIUM, @@ -763,11 +813,6 @@ def test_get_rest( psc_connection_status=compute.ForwardingRule.PscConnectionStatus.ACCEPTED, region="region_value", self_link="self_link_value", - service_directory_registrations=[ - compute.ForwardingRuleServiceDirectoryRegistration( - namespace="namespace_value" - ) - ], service_label="service_label_value", service_name="service_name_value", subnetwork="subnetwork_value", @@ -775,9 +820,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRule.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -797,16 +842,10 @@ def test_get_rest( assert response.is_mirroring_collector is True assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert ( response.load_balancing_scheme == compute.ForwardingRule.LoadBalancingScheme.EXTERNAL ) - assert response.metadata_filters == [ - compute.MetadataFilter( - filter_labels=[compute.MetadataFilterLabelMatch(name="name_value")] - ) - ] assert response.name == "name_value" assert response.network == "network_value" assert response.network_tier == compute.ForwardingRule.NetworkTier.PREMIUM @@ -819,21 +858,47 @@ def test_get_rest( ) assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.service_directory_registrations == [ - compute.ForwardingRuleServiceDirectoryRegistration(namespace="namespace_value") - ] assert response.service_label == "service_label_value" assert response.service_name == "service_name_value" assert response.subnetwork == "subnetwork_value" assert response.target == "target_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetForwardingRuleRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -841,33 +906,44 @@ def test_get_rest_flattened(): return_value = compute.ForwardingRule() # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRule.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", forwarding_rule="forwarding_rule_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -887,9 +963,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["forwarding_rule_resource"] = compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -899,7 +978,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -917,14 +995,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -935,7 +1012,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -953,18 +1029,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertForwardingRuleRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["forwarding_rule_resource"] = compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -972,40 +1073,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- forwarding_rule_resource = compute.ForwardingRule( - I_p_address="I_p_address_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - forwarding_rule_resource=forwarding_rule_resource, + forwarding_rule_resource=compute.ForwardingRule( + I_p_address="I_p_address_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.ForwardingRule.to_json( - forwarding_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/forwardingRules" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1027,26 +1130,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ForwardingRuleList( id="id_value", - items=[compute.ForwardingRule(I_p_address="I_p_address_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRuleList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRuleList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1054,19 +1155,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.ForwardingRule(I_p_address="I_p_address_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListForwardingRulesRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1074,30 +1198,36 @@ def test_list_rest_flattened(): return_value = compute.ForwardingRuleList() # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRuleList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRuleList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/forwardingRules" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1109,11 +1239,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ForwardingRuleList( @@ -1143,16 +1275,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ForwardingRule) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1164,9 +1295,16 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request_init["forwarding_rule_resource"] = compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1176,7 +1314,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1194,14 +1331,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1212,7 +1348,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1230,18 +1365,47 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchForwardingRuleRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request_init["forwarding_rule_resource"] = compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1249,42 +1413,47 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - forwarding_rule_resource = compute.ForwardingRule( - I_p_address="I_p_address_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", forwarding_rule="forwarding_rule_value", - forwarding_rule_resource=forwarding_rule_resource, + forwarding_rule_resource=compute.ForwardingRule( + I_p_address="I_p_address_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) - assert compute.ForwardingRule.to_json( - forwarding_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1307,9 +1476,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_labels_request_resource"] = compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1319,7 +1491,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1337,14 +1508,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1355,7 +1525,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1373,18 +1542,43 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsForwardingRuleRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_labels_request_resource"] = compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_labels_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1392,42 +1586,47 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_set_labels_request_resource = compute.RegionSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_labels_request_resource=region_set_labels_request_resource, + region_set_labels_request_resource=compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetLabelsRequest.to_json( - region_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1450,9 +1649,16 @@ def test_set_target_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request_init["target_reference_resource"] = compute.TargetReference( + target="target_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1462,7 +1668,6 @@ def test_set_target_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1480,14 +1685,13 @@ def test_set_target_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_target(request) @@ -1498,7 +1702,6 @@ def test_set_target_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1516,18 +1719,47 @@ def test_set_target_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_target_rest_bad_request( + transport: str = "rest", request_type=compute.SetTargetForwardingRuleRequest +): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + request_init["target_reference_resource"] = compute.TargetReference( + target="target_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target(request) + + def test_set_target_rest_from_dict(): test_set_target_rest(request_type=dict) -def test_set_target_rest_flattened(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_target_rest_flattened(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1535,40 +1767,45 @@ def test_set_target_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_reference_resource = compute.TargetReference(target="target_value") - client.set_target( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "forwarding_rule": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", forwarding_rule="forwarding_rule_value", - target_reference_resource=target_reference_resource, + target_reference_resource=compute.TargetReference(target="target_value"), ) + mock_args.update(sample_request) + client.set_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) - assert compute.TargetReference.to_json( - target_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_target_rest_flattened_error(): - client = ForwardingRulesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/forwardingRules/{forwarding_rule}/setTarget" + % client.transport._host, + args[1], + ) + + +def test_set_target_rest_flattened_error(transport: str = "rest"): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1665,8 +1902,10 @@ def test_forwarding_rules_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_forwarding_rules_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1690,29 +1929,6 @@ def test_forwarding_rules_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_forwarding_rules_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.forwarding_rules.transports.ForwardingRulesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ForwardingRulesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_forwarding_rules_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1724,7 +1940,6 @@ def test_forwarding_rules_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_forwarding_rules_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1740,21 +1955,6 @@ def test_forwarding_rules_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_forwarding_rules_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ForwardingRulesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_forwarding_rules_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1901,3 +2101,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_global_addresses.py b/tests/unit/gapic/compute_v1/test_global_addresses.py index 35f92ad4a..adb2492f2 100644 --- a/tests/unit/gapic/compute_v1/test_global_addresses.py +++ b/tests/unit/gapic/compute_v1/test_global_addresses.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.global_addresses import GlobalAddressesClient from google.cloud.compute_v1.services.global_addresses import pagers from google.cloud.compute_v1.services.global_addresses import transports -from google.cloud.compute_v1.services.global_addresses.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_global_addresses_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_global_addresses_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_global_addresses_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_global_addresses_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided 
and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_global_addresses_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_global_addresses_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_global_addresses_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_global_addresses_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -306,7 +294,7 @@ def test_global_addresses_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -323,6 +311,7 @@ def test_global_addresses_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -347,7 +336,7 @@ def test_global_addresses_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -356,6 +345,7 @@ def test_global_addresses_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -368,7 +358,7 @@ def test_global_addresses_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +367,7 @@ def test_global_addresses_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +382,7 @@ def test_global_addresses_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -400,6 +391,7 @@ def test_global_addresses_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -414,7 +406,7 @@ def test_global_addresses_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -423,6 +415,7 @@ def test_global_addresses_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -433,9 +426,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "address": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -445,7 +438,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -463,14 +455,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -481,7 +472,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -499,18 +489,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteGlobalAddressRequest +): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "address": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -518,30 +530,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", address="address_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "address": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", address="address_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "address_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/addresses/{address}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -560,9 +578,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "address": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -588,9 +606,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.Address.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Address.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -616,12 +634,37 @@ def test_get_rest( assert response.users == ["users_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetGlobalAddressRequest +): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "address": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -629,30 +672,36 @@ def test_get_rest_flattened(): return_value = compute.Address() # Wrap the value into a proper Response obj - json_return_value = compute.Address.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Address.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", address="address_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "address": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", address="address_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "address_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/addresses/{address}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -671,9 +720,10 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["address_resource"] = compute.Address(address="address_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -683,7 +733,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -701,14 +750,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -719,7 +767,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -737,18 +784,41 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertGlobalAddressRequest +): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["address_resource"] = compute.Address(address="address_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -756,35 +826,39 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- address_resource = compute.Address(address="address_value") - client.insert( - project="project_value", address_resource=address_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + address_resource=compute.Address(address="address_value"), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.Address.to_json( - address_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/addresses" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -803,26 +877,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.AddressList( id="id_value", - items=[compute.Address(address="address_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.AddressList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AddressList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -830,19 +902,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.Address(address="address_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListGlobalAddressesRequest +): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -850,27 +945,36 @@ def test_list_rest_flattened(): return_value = compute.AddressList() # Wrap the value into a proper Response obj - json_return_value = compute.AddressList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.AddressList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/addresses" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -880,11 +984,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = GlobalAddressesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.AddressList( @@ -906,16 +1012,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Address) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -999,8 +1104,10 @@ def test_global_addresses_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_global_addresses_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1024,29 +1131,6 @@ def test_global_addresses_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_global_addresses_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.global_addresses.transports.GlobalAddressesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GlobalAddressesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_global_addresses_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1058,7 +1142,6 @@ def test_global_addresses_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_global_addresses_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1074,21 +1157,6 @@ def test_global_addresses_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_global_addresses_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GlobalAddressesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_global_addresses_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1235,3 +1303,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = GlobalAddressesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py b/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py index c90f1f982..2b5974895 100644 --- a/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py +++ b/tests/unit/gapic/compute_v1/test_global_forwarding_rules.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.global_forwarding_rules import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.global_forwarding_rules import pagers from google.cloud.compute_v1.services.global_forwarding_rules import transports -from google.cloud.compute_v1.services.global_forwarding_rules.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_global_forwarding_rules_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_global_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_global_forwarding_rules_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_global_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_global_forwarding_rules_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_global_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_global_forwarding_rules_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_global_forwarding_rules_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_global_forwarding_rules_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == 
"false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_global_forwarding_rules_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_global_forwarding_rules_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_global_forwarding_rules_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_global_forwarding_rules_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_global_forwarding_rules_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_global_forwarding_rules_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_global_forwarding_rules_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_global_forwarding_rules_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_global_forwarding_rules_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -454,9 +447,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -466,7 +459,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -484,14 +476,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -502,7 +493,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -520,19 +510,39 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteGlobalForwardingRuleRequest +): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -541,31 +551,37 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
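The regenerated REST tests above stop sending empty proto3 requests and instead seed a `request_init` dict with the path fields the method's HTTP rule needs, so URL transcoding succeeds against the mocked session. A minimal sketch of that construction, assuming the `compute_v1` types named in the diff and the same placeholder values (proto-plus messages accept a mapping as their first positional argument):

```python
from google.cloud.compute_v1.types import compute

# Placeholder path values mirroring the tests; any non-empty strings would do.
request_init = {"project": "sample1", "forwarding_rule": "sample2"}
request = compute.DeleteGlobalForwardingRuleRequest(request_init)

assert request.project == "sample1"
assert request.forwarding_rule == "sample2"
```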
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "forwarding_rule": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", forwarding_rule="forwarding_rule_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -585,9 +601,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
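The flattened-call hunk above also swaps the old substring checks on the serialized body for a single URL assertion: `path_template.validate` from `google.api_core` matches the outgoing request URI against the method's HTTP rule. A standalone illustration, with a hypothetical host in place of `client.transport._host`:

```python
from google.api_core import path_template

# {project} and {forwarding_rule} become single-segment wildcards.
template = (
    "https://compute.googleapis.com/compute/v1/projects/{project}"
    "/global/forwardingRules/{forwarding_rule}"
)
url = (
    "https://compute.googleapis.com/compute/v1/projects/sample1"
    "/global/forwardingRules/sample2"
)

assert path_template.validate(template, url)
assert not path_template.validate(template, url + "/extra")
```

Validating the full URI is stricter than the previous `"project_value" in ...` checks, since it also catches a field landing in the wrong path segment.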
with mock.patch.object(Session, "request") as req: @@ -606,13 +622,7 @@ def test_get_rest( is_mirroring_collector=True, kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, load_balancing_scheme=compute.ForwardingRule.LoadBalancingScheme.EXTERNAL, - metadata_filters=[ - compute.MetadataFilter( - filter_labels=[compute.MetadataFilterLabelMatch(name="name_value")] - ) - ], name="name_value", network="network_value", network_tier=compute.ForwardingRule.NetworkTier.PREMIUM, @@ -622,11 +632,6 @@ def test_get_rest( psc_connection_status=compute.ForwardingRule.PscConnectionStatus.ACCEPTED, region="region_value", self_link="self_link_value", - service_directory_registrations=[ - compute.ForwardingRuleServiceDirectoryRegistration( - namespace="namespace_value" - ) - ], service_label="service_label_value", service_name="service_name_value", subnetwork="subnetwork_value", @@ -634,9 +639,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRule.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -656,16 +661,10 @@ def test_get_rest( assert response.is_mirroring_collector is True assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert ( response.load_balancing_scheme == compute.ForwardingRule.LoadBalancingScheme.EXTERNAL ) - assert response.metadata_filters == [ - compute.MetadataFilter( - filter_labels=[compute.MetadataFilterLabelMatch(name="name_value")] - ) - ] assert response.name == "name_value" assert response.network == "network_value" assert response.network_tier == compute.ForwardingRule.NetworkTier.PREMIUM @@ -678,22 +677,42 @@ def test_get_rest( ) assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.service_directory_registrations == [ - compute.ForwardingRuleServiceDirectoryRegistration(namespace="namespace_value") - ] assert response.service_label == "service_label_value" assert response.service_name == "service_name_value" assert response.subnetwork == "subnetwork_value" assert response.target == "target_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetGlobalForwardingRuleRequest +): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -702,31 +721,37 @@ def test_get_rest_flattened(): return_value = compute.ForwardingRule() # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRule.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "forwarding_rule": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", forwarding_rule="forwarding_rule_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -746,9 +771,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["forwarding_rule_resource"] = compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
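The new `*_rest_bad_request` tests rely on `google.api_core` translating HTTP status codes into typed exceptions, which is why a mocked 400 `Response` surfaces to the caller as `core_exceptions.BadRequest`. A quick sketch of that mapping in isolation, using `from_http_status`, the helper that backs this translation:

```python
from google.api_core import exceptions as core_exceptions

exc = core_exceptions.from_http_status(400, "bad request")
assert isinstance(exc, core_exceptions.BadRequest)
assert exc.code == 400
```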
with mock.patch.object(Session, "request") as req: @@ -758,7 +786,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -776,14 +803,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -794,7 +820,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -812,19 +837,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertGlobalForwardingRuleRequest +): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["forwarding_rule_resource"] = compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -833,38 +881,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- forwarding_rule_resource = compute.ForwardingRule( - I_p_address="I_p_address_value" - ) - client.insert( - project="project_value", forwarding_rule_resource=forwarding_rule_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + forwarding_rule_resource=compute.ForwardingRule( + I_p_address="I_p_address_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.ForwardingRule.to_json( - forwarding_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/forwardingRules" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -886,26 +936,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ForwardingRuleList( id="id_value", - items=[compute.ForwardingRule(I_p_address="I_p_address_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRuleList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRuleList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -913,20 +961,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.ForwardingRule(I_p_address="I_p_address_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListGlobalForwardingRulesRequest +): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -935,28 +1004,35 @@ def test_list_rest_flattened(): return_value = compute.ForwardingRuleList() # Wrap the value into a proper Response obj - json_return_value = compute.ForwardingRuleList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ForwardingRuleList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/forwardingRules" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -967,13 +1043,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = GlobalForwardingRulesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. 
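The `test_list_rest_pager` rewrite that begins here feeds the mocked session a series of `ForwardingRuleList` pages and checks that the pager walks them via `next_page_token`. For orientation, a hand-built sketch of that page-chaining contract (page sizes are illustrative; like the real test, six items are spread across four pages):

```python
from google.cloud.compute_v1.types import compute

# Every page except the last carries a next_page_token.
pages = [
    compute.ForwardingRuleList(
        items=[compute.ForwardingRule() for _ in range(3)],
        next_page_token="abc",
    ),
    compute.ForwardingRuleList(items=[], next_page_token="def"),
    compute.ForwardingRuleList(
        items=[compute.ForwardingRule()], next_page_token="ghi"
    ),
    compute.ForwardingRuleList(items=[compute.ForwardingRule() for _ in range(2)]),
]

# Flattening the pages yields every item once, in order, which is what
# iterating the ListPager is expected to produce.
items = [item for page in pages for item in page.items]
assert len(items) == 6
assert [page.next_page_token for page in pages] == ["abc", "def", "ghi", ""]
```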
with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ForwardingRuleList( @@ -1003,16 +1081,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ForwardingRule) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1024,9 +1101,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request_init["forwarding_rule_resource"] = compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1036,7 +1116,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1054,14 +1133,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1072,7 +1150,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1090,19 +1167,42 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchGlobalForwardingRuleRequest +): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request_init["forwarding_rule_resource"] = 
compute.ForwardingRule( + I_p_address="I_p_address_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1111,41 +1211,41 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - forwarding_rule_resource = compute.ForwardingRule( - I_p_address="I_p_address_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "forwarding_rule": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", forwarding_rule="forwarding_rule_value", - forwarding_rule_resource=forwarding_rule_resource, + forwarding_rule_resource=compute.ForwardingRule( + I_p_address="I_p_address_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) - assert compute.ForwardingRule.to_json( - forwarding_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1168,9 +1268,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1180,7 +1283,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1198,14 +1300,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1216,7 +1317,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1234,19 +1334,42 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsGlobalForwardingRuleRequest +): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): +def test_set_labels_rest_flattened(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
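A recurring mechanical change in these hunks: the fake `Response` now gets its `status_code` before the proto-plus payload is serialized, and the flattened tests set `_content` explicitly. For reference, a small sketch of how such a fake REST payload round-trips through proto-plus JSON (field values are the tests' placeholders; the parse step only approximates what the transport does on a real response):

```python
from requests import Response

from google.cloud.compute_v1.types import compute

return_value = compute.Operation(description="description_value", id=205)

# Fake a 200 response carrying the serialized Operation, as the tests do.
response_value = Response()
response_value.status_code = 200
response_value._content = compute.Operation.to_json(return_value).encode("UTF-8")

# Roughly the reverse step the REST transport performs.
parsed = compute.Operation.from_json(response_value.content.decode("UTF-8"))
assert parsed.description == "description_value"
assert parsed.id == 205
```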
@@ -1255,41 +1378,41 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_set_labels_request_resource = compute.GlobalSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_labels_request_resource=global_set_labels_request_resource, + global_set_labels_request_resource=compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetLabelsRequest.to_json( - global_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/forwardingRules/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1312,9 +1435,12 @@ def test_set_target_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request_init["target_reference_resource"] = compute.TargetReference( + target="target_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1324,7 +1450,6 @@ def test_set_target_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1342,14 +1467,13 @@ def test_set_target_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_target(request) @@ -1360,7 +1484,6 @@ def test_set_target_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1378,19 +1501,42 @@ def test_set_target_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_target_rest_bad_request( + transport: str = "rest", request_type=compute.SetTargetGlobalForwardingRuleRequest +): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "forwarding_rule": "sample2"} + request_init["target_reference_resource"] = compute.TargetReference( + target="target_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target(request) + + def test_set_target_rest_from_dict(): test_set_target_rest(request_type=dict) -def test_set_target_rest_flattened(): +def test_set_target_rest_flattened(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1399,39 +1545,39 @@ def test_set_target_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_reference_resource = compute.TargetReference(target="target_value") - client.set_target( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "forwarding_rule": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", forwarding_rule="forwarding_rule_value", - target_reference_resource=target_reference_resource, + target_reference_resource=compute.TargetReference(target="target_value"), ) + mock_args.update(sample_request) + client.set_target(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "forwarding_rule_value" in http_call[1] + str(body) + str(params) - assert compute.TargetReference.to_json( - target_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_target_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/forwardingRules/{forwarding_rule}/setTarget" + % client.transport._host, + args[1], + ) + + +def test_set_target_rest_flattened_error(transport: str = "rest"): client = GlobalForwardingRulesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1529,8 +1675,10 @@ def test_global_forwarding_rules_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_global_forwarding_rules_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1554,29 +1702,6 @@ def test_global_forwarding_rules_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_global_forwarding_rules_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.global_forwarding_rules.transports.GlobalForwardingRulesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GlobalForwardingRulesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_global_forwarding_rules_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1588,7 +1713,6 @@ def test_global_forwarding_rules_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_global_forwarding_rules_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1604,21 +1728,6 @@ def test_global_forwarding_rules_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_global_forwarding_rules_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GlobalForwardingRulesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_global_forwarding_rules_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1765,3 +1874,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = GlobalForwardingRulesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
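The new `test_transport_close` and `test_client_ctx` cases above pin down context-manager support on the generated clients: entering the client is a no-op, and exiting closes the REST transport's underlying session exactly once. A condensed sketch of the asserted behavior, assuming a client generated with this change (the `_session` attribute name comes from the test's transport map):

```python
import mock

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.global_forwarding_rules import (
    GlobalForwardingRulesClient,
)

client = GlobalForwardingRulesClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)

# Closing the client should close the transport's requests.Session once.
with mock.patch.object(type(client.transport._session), "close") as close:
    with client:
        close.assert_not_called()
close.assert_called_once()
```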
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py b/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py index 833f6fbac..c06a0fc5b 100644 --- a/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py +++ b/tests/unit/gapic/compute_v1/test_global_network_endpoint_groups.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.global_network_endpoint_groups import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.global_network_endpoint_groups import pagers from google.cloud.compute_v1.services.global_network_endpoint_groups import transports -from google.cloud.compute_v1.services.global_network_endpoint_groups.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +194,7 @@ def test_global_network_endpoint_groups_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +203,7 @@ def test_global_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +211,7 @@ def test_global_network_endpoint_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +220,7 @@ def test_global_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -242,7 +228,7 @@ def test_global_network_endpoint_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +237,7 @@ def test_global_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -270,7 +257,7 @@ def test_global_network_endpoint_groups_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -279,6 +266,7 @@ def test_global_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -321,7 +309,7 @@ def test_global_network_endpoint_groups_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = 
client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -338,6 +326,7 @@ def test_global_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -362,7 +351,7 @@ def test_global_network_endpoint_groups_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -371,6 +360,7 @@ def test_global_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -383,7 +373,7 @@ def test_global_network_endpoint_groups_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -392,6 +382,7 @@ def test_global_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -412,7 +403,7 @@ def test_global_network_endpoint_groups_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,6 +412,7 @@ def test_global_network_endpoint_groups_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -441,7 +433,7 @@ def test_global_network_endpoint_groups_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -450,6 +442,7 @@ def test_global_network_endpoint_groups_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -461,9 +454,16 @@ def test_attach_network_endpoints_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ] = compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -473,7 +473,6 @@ def test_attach_network_endpoints_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -491,14 +490,13 @@ def test_attach_network_endpoints_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.attach_network_endpoints(request) @@ -509,7 +507,6 @@ def test_attach_network_endpoints_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -527,19 +524,47 @@ def test_attach_network_endpoints_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_attach_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.AttachNetworkEndpointsGlobalNetworkEndpointGroupRequest, +): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request_init[ + "global_network_endpoint_groups_attach_endpoints_request_resource" + ] = compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_network_endpoints(request) + + def test_attach_network_endpoints_rest_from_dict(): test_attach_network_endpoints_rest(request_type=dict) -def test_attach_network_endpoints_rest_flattened(): +def test_attach_network_endpoints_rest_flattened(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -548,43 +573,43 @@ def test_attach_network_endpoints_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_network_endpoint_groups_attach_endpoints_request_resource = compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest( - network_endpoints=[ - compute.NetworkEndpoint(annotations={"key_value": "value_value"}) - ] - ) - client.attach_network_endpoints( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network_endpoint_group": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network_endpoint_group="network_endpoint_group_value", - global_network_endpoint_groups_attach_endpoints_request_resource=global_network_endpoint_groups_attach_endpoints_request_resource, + global_network_endpoint_groups_attach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) + mock_args.update(sample_request) + client.attach_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalNetworkEndpointGroupsAttachEndpointsRequest.to_json( - global_network_endpoint_groups_attach_endpoints_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_attach_network_endpoints_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" + % client.transport._host, + args[1], + ) + + +def test_attach_network_endpoints_rest_flattened_error(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -610,9 +635,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -622,7 +647,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -640,14 +664,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -658,7 +681,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -676,19 +698,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteGlobalNetworkEndpointGroupRequest, +): + client = GlobalNetworkEndpointGroupsClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -697,32 +740,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network_endpoint_group": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network_endpoint_group="network_endpoint_group_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -743,9 +792,16 @@ def test_detach_network_endpoints_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ] = compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -755,7 +811,6 @@ def test_detach_network_endpoints_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -773,14 +828,13 @@ def test_detach_network_endpoints_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.detach_network_endpoints(request) @@ -791,7 +845,6 @@ def test_detach_network_endpoints_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -809,19 +862,47 @@ def test_detach_network_endpoints_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_detach_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.DetachNetworkEndpointsGlobalNetworkEndpointGroupRequest, +): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request_init[ + "global_network_endpoint_groups_detach_endpoints_request_resource" + ] = compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints(request) + + def test_detach_network_endpoints_rest_from_dict(): test_detach_network_endpoints_rest(request_type=dict) -def test_detach_network_endpoints_rest_flattened(): +def test_detach_network_endpoints_rest_flattened(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -830,43 +911,43 @@ def test_detach_network_endpoints_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_network_endpoint_groups_detach_endpoints_request_resource = compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest( - network_endpoints=[ - compute.NetworkEndpoint(annotations={"key_value": "value_value"}) - ] - ) - client.detach_network_endpoints( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network_endpoint_group": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network_endpoint_group="network_endpoint_group_value", - global_network_endpoint_groups_detach_endpoints_request_resource=global_network_endpoint_groups_detach_endpoints_request_resource, + global_network_endpoint_groups_detach_endpoints_request_resource=compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) + mock_args.update(sample_request) + client.detach_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalNetworkEndpointGroupsDetachEndpointsRequest.to_json( - global_network_endpoint_groups_detach_endpoints_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_detach_network_endpoints_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" + % client.transport._host, + args[1], + ) + + +def test_detach_network_endpoints_rest_flattened_error(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -891,20 +972,14 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"}, - app_engine=compute.NetworkEndpointGroupAppEngine(service="service_value"), - cloud_function=compute.NetworkEndpointGroupCloudFunction( - function="function_value" - ), - cloud_run=compute.NetworkEndpointGroupCloudRun(service="service_value"), creation_timestamp="creation_timestamp_value", default_port=1289, description="description_value", @@ -921,25 +996,15 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.NetworkEndpointGroup) - assert response.annotations == {"key_value": "value_value"} - assert response.app_engine == compute.NetworkEndpointGroupAppEngine( - service="service_value" - ) - assert response.cloud_function == compute.NetworkEndpointGroupCloudFunction( - function="function_value" - ) - assert response.cloud_run == compute.NetworkEndpointGroupCloudRun( - service="service_value" - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.default_port == 1289 assert response.description == "description_value" @@ -958,13 +1023,36 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetGlobalNetworkEndpointGroupRequest +): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -973,32 +1061,38 @@ def test_get_rest_flattened(): return_value = compute.NetworkEndpointGroup() # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroup.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network_endpoint_group": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network_endpoint_group="network_endpoint_group_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1019,9 +1113,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["network_endpoint_group_resource"] = compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1031,7 +1128,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1049,14 +1145,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1067,7 +1162,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1085,19 +1179,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertGlobalNetworkEndpointGroupRequest, +): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["network_endpoint_group_resource"] = compute.NetworkEndpointGroup( + 
annotations={"key_value": "value_value"} + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1106,39 +1224,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - network_endpoint_group_resource = compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"} - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - network_endpoint_group_resource=network_endpoint_group_resource, + network_endpoint_group_resource=compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.NetworkEndpointGroup.to_json( - network_endpoint_group_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networkEndpointGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1160,28 +1279,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NetworkEndpointGroupList( id="id_value", - items=[ - compute.NetworkEndpointGroup(annotations={"key_value": "value_value"}) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1189,22 +1304,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.NetworkEndpointGroup(annotations={"key_value": "value_value"}) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListGlobalNetworkEndpointGroupsRequest +): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1213,28 +1347,35 @@ def test_list_rest_flattened(): return_value = compute.NetworkEndpointGroupList() # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networkEndpointGroups" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1245,13 +1386,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = GlobalNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NetworkEndpointGroupList( @@ -1281,16 +1424,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NetworkEndpointGroup) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1303,37 +1445,23 @@ def test_list_network_endpoints_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NetworkEndpointGroupsListNetworkEndpoints( - id="id_value", - items=[ - compute.NetworkEndpointWithHealthStatus( - healths=[ - compute.HealthStatusForNetworkEndpoint( - backend_service=compute.BackendServiceReference( - backend_service="backend_service_value" - ) - ) - ] - ) - ], - kind="kind_value", - next_page_token="next_page_token_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), + id="id_value", kind="kind_value", next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_network_endpoints(request) @@ -1341,29 +1469,41 @@ def test_list_network_endpoints_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListNetworkEndpointsPager) assert response.id == "id_value" - assert response.items == [ - compute.NetworkEndpointWithHealthStatus( - healths=[ - compute.HealthStatusForNetworkEndpoint( - backend_service=compute.BackendServiceReference( - backend_service="backend_service_value" - ) - ) - ] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest, +): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network_endpoint_group": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_network_endpoints(request) def test_list_network_endpoints_rest_from_dict(): test_list_network_endpoints_rest(request_type=dict) -def test_list_network_endpoints_rest_flattened(): +def test_list_network_endpoints_rest_flattened(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1372,34 +1512,40 @@ def test_list_network_endpoints_rest_flattened(): return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_network_endpoints( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network_endpoint_group": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network_endpoint_group="network_endpoint_group_value", ) + mock_args.update(sample_request) + client.list_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints" + % client.transport._host, + args[1], + ) -def test_list_network_endpoints_rest_flattened_error(): +def test_list_network_endpoints_rest_flattened_error(transport: str = "rest"): client = GlobalNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1412,13 +1558,15 @@ def test_list_network_endpoints_rest_flattened_error(): ) -def test_list_network_endpoints_pager(): +def test_list_network_endpoints_rest_pager(): client = GlobalNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NetworkEndpointGroupsListNetworkEndpoints( @@ -1457,10 +1605,9 @@ def test_list_network_endpoints_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_network_endpoints(request={}) + sample_request = {"project": "sample1", "network_endpoint_group": "sample2"} - assert pager._metadata == metadata + pager = client.list_network_endpoints(request=sample_request) results = list(pager) assert len(results) == 6 @@ -1468,7 +1615,7 @@ def test_list_network_endpoints_pager(): isinstance(i, compute.NetworkEndpointWithHealthStatus) for i in results ) - pages = list(client.list_network_endpoints(request={}).pages) + pages = list(client.list_network_endpoints(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1557,8 +1704,10 @@ def test_global_network_endpoint_groups_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_global_network_endpoint_groups_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1582,29 +1731,6 @@ def test_global_network_endpoint_groups_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_global_network_endpoint_groups_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.global_network_endpoint_groups.transports.GlobalNetworkEndpointGroupsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GlobalNetworkEndpointGroupsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_global_network_endpoint_groups_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1616,7 +1742,6 @@ def test_global_network_endpoint_groups_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_global_network_endpoint_groups_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1632,21 +1757,6 @@ def test_global_network_endpoint_groups_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_global_network_endpoint_groups_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GlobalNetworkEndpointGroupsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_global_network_endpoint_groups_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1795,3 +1905,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = GlobalNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_global_operations.py b/tests/unit/gapic/compute_v1/test_global_operations.py index 6141295c6..9fdda7060 100644 --- a/tests/unit/gapic/compute_v1/test_global_operations.py +++ b/tests/unit/gapic/compute_v1/test_global_operations.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.global_operations import GlobalOperationsClient from google.cloud.compute_v1.services.global_operations import pagers from google.cloud.compute_v1.services.global_operations import transports -from google.cloud.compute_v1.services.global_operations.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_global_operations_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_global_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_global_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_global_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_global_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_global_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_global_operations_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_global_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_global_operations_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 
@@ def test_global_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_global_operations_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_global_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -369,7 +359,7 @@ def test_global_operations_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_global_operations_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_global_operations_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_global_operations_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_global_operations_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_global_operations_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,35 +427,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.OperationAggregatedList( id="id_value", - items={ - "key_value": compute.OperationsScopedList( - operations=[ - compute.Operation( - client_operation_id="client_operation_id_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.OperationAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -470,26 +453,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.OperationsScopedList( - operations=[ - compute.Operation(client_operation_id="client_operation_id_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListGlobalOperationsRequest +): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -497,27 +497,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.OperationAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.OperationAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/operations" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -527,11 +536,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.OperationAggregatedList( @@ -564,10 +575,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.OperationsScopedList) assert pager.get("h") is None @@ -582,7 +592,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.OperationsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -594,9 +604,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "operation": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
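# A minimal sketch of the request-construction pattern used in the rewritten
# tests above: proto-plus request messages accept a mapping as their first
# argument, so a dict that already satisfies HTTP transcoding can be passed
# directly. The field values below are arbitrary samples.
from google.cloud.compute_v1.types import compute

request_init = {"project": "sample1", "operation": "sample2"}
request = compute.GetGlobalOperationRequest(request_init)
assert request.project == "sample1"
assert request.operation == "sample2"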
with mock.patch.object(Session, "request") as req: @@ -604,9 +614,9 @@ def test_delete_rest( return_value = compute.DeleteGlobalOperationResponse() # Wrap the value into a proper Response obj - json_return_value = compute.DeleteGlobalOperationResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DeleteGlobalOperationResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -615,12 +625,37 @@ def test_delete_rest( assert isinstance(response, compute.DeleteGlobalOperationResponse) +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteGlobalOperationRequest +): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "operation": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -628,30 +663,36 @@ def test_delete_rest_flattened(): return_value = compute.DeleteGlobalOperationResponse() # Wrap the value into a proper Response obj - json_return_value = compute.DeleteGlobalOperationResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DeleteGlobalOperationResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( - project="project_value", operation="operation_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "operation": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", operation="operation_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -670,9 +711,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "operation": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -682,7 +723,6 @@ def test_get_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -700,14 +740,13 @@ def test_get_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -718,7 +757,6 @@ def test_get_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -736,18 +774,40 @@ def test_get_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetGlobalOperationRequest +): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "operation": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
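# A minimal sketch of the URI assertion introduced above: path_template.validate()
# from google.api_core turns a URI template into a regular expression and returns
# True when the concrete URI matches it. The host and sample values below are
# placeholders mirroring the mocked tests.
from google.api_core import path_template

uri = "https://compute.googleapis.com/compute/v1/projects/sample1/global/operations/sample2"
template = (
    "https://compute.googleapis.com/compute/v1/projects/{project}/global/operations/{operation}"
)
assert path_template.validate(template, uri)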
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -755,30 +815,36 @@ def test_get_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", operation="operation_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "operation": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", operation="operation_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -797,26 +863,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
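# A minimal sketch of the error mapping exercised by the *_rest_bad_request tests:
# when the REST transport receives an HTTP 400, the call raises
# google.api_core.exceptions.BadRequest. Application Default Credentials and the
# argument values are assumptions for illustration; whether the server actually
# returns 400 depends on the request it receives.
from google.api_core import exceptions as core_exceptions
from google.cloud import compute_v1

client = compute_v1.GlobalOperationsClient()
try:
    client.get(project="an invalid project name", operation="sample-operation")
except core_exceptions.BadRequest as exc:
    print("request rejected:", exc)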
return_value = compute.OperationList( id="id_value", - items=[compute.Operation(client_operation_id="client_operation_id_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -824,21 +888,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Operation(client_operation_id="client_operation_id_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListGlobalOperationsRequest +): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -846,27 +931,36 @@ def test_list_rest_flattened(): return_value = compute.OperationList() # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/operations" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -876,11 +970,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.OperationList( @@ -902,16 +998,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Operation) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -923,9 +1018,9 @@ def test_wait_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "operation": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
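# A minimal sketch of how the REST pagers exercised above are consumed, assuming
# Application Default Credentials and an existing project ("my-project" is a
# placeholder). Iterating the pager yields compute.Operation items and lazily
# fetches further pages; .pages exposes the raw per-page responses.
from google.cloud import compute_v1

client = compute_v1.GlobalOperationsClient()
for operation in client.list(request={"project": "my-project"}):
    print(operation.name, operation.status)

for page in client.list(request={"project": "my-project"}).pages:
    print(page.raw_page.next_page_token)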
with mock.patch.object(Session, "request") as req: @@ -935,7 +1030,6 @@ def test_wait_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -953,14 +1047,13 @@ def test_wait_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.wait(request) @@ -971,7 +1064,6 @@ def test_wait_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -989,18 +1081,40 @@ def test_wait_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_wait_rest_bad_request( + transport: str = "rest", request_type=compute.WaitGlobalOperationRequest +): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "operation": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait(request) + + def test_wait_rest_from_dict(): test_wait_rest(request_type=dict) -def test_wait_rest_flattened(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_wait_rest_flattened(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1008,30 +1122,36 @@ def test_wait_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.wait( - project="project_value", operation="operation_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "operation": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", operation="operation_value",) + mock_args.update(sample_request) + client.wait(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/operations/{operation}/wait" + % client.transport._host, + args[1], + ) -def test_wait_rest_flattened_error(): - client = GlobalOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_wait_rest_flattened_error(transport: str = "rest"): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1123,8 +1243,10 @@ def test_global_operations_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_global_operations_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1148,29 +1270,6 @@ def test_global_operations_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_global_operations_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.global_operations.transports.GlobalOperationsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GlobalOperationsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_global_operations_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1182,7 +1281,6 @@ def test_global_operations_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_global_operations_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1198,21 +1296,6 @@ def test_global_operations_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_global_operations_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GlobalOperationsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_global_operations_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1359,3 +1442,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = GlobalOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_global_organization_operations.py b/tests/unit/gapic/compute_v1/test_global_organization_operations.py index 1cc609c3e..b0e9c0e16 100644 --- a/tests/unit/gapic/compute_v1/test_global_organization_operations.py +++ b/tests/unit/gapic/compute_v1/test_global_organization_operations.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.global_organization_operations import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.global_organization_operations import pagers from google.cloud.compute_v1.services.global_organization_operations import transports -from google.cloud.compute_v1.services.global_organization_operations.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
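# A minimal sketch of the behaviour covered by the new test_transport_close and
# test_client_ctx tests above: the client can be used as a context manager, and
# leaving the block closes the underlying transport (the requests Session for
# the "rest" transport).
from google.auth.credentials import AnonymousCredentials
from google.cloud import compute_v1

with compute_v1.GlobalOperationsClient(
    credentials=AnonymousCredentials(), transport="rest"
) as client:
    pass  # on exit, client.transport.close() is called, closing the HTTP session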
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +194,7 @@ def test_global_organization_operations_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +203,7 @@ def test_global_organization_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +211,7 @@ def test_global_organization_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +220,7 @@ def test_global_organization_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -242,7 +228,7 @@ def test_global_organization_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +237,7 @@ def test_global_organization_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -270,7 +257,7 @@ def test_global_organization_operations_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -279,6 +266,7 @@ def test_global_organization_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -321,7 +309,7 @@ def test_global_organization_operations_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = 
client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -338,6 +326,7 @@ def test_global_organization_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -362,7 +351,7 @@ def test_global_organization_operations_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -371,6 +360,7 @@ def test_global_organization_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -383,7 +373,7 @@ def test_global_organization_operations_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -392,6 +382,7 @@ def test_global_organization_operations_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -412,7 +403,7 @@ def test_global_organization_operations_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,6 +412,7 @@ def test_global_organization_operations_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -441,7 +433,7 @@ def test_global_organization_operations_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -450,6 +442,7 @@ def test_global_organization_operations_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -461,9 +454,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"operation": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -471,11 +464,11 @@ def test_delete_rest( return_value = compute.DeleteGlobalOrganizationOperationResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.DeleteGlobalOrganizationOperationResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -484,13 +477,37 @@ def test_delete_rest( assert isinstance(response, compute.DeleteGlobalOrganizationOperationResponse) +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteGlobalOrganizationOperationRequest, +): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"operation": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = GlobalOrganizationOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -499,30 +516,37 @@ def test_delete_rest_flattened(): return_value = compute.DeleteGlobalOrganizationOperationResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.DeleteGlobalOrganizationOperationResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete(operation="operation_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"operation": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(operation="operation_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = GlobalOrganizationOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -541,9 +565,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"operation": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -553,7 +577,6 @@ def test_get_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -571,14 +594,13 @@ def test_get_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -589,7 +611,6 @@ def test_get_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -607,19 +628,39 @@ def test_get_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetGlobalOrganizationOperationRequest +): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"operation": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = GlobalOrganizationOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -628,28 +669,35 @@ def test_get_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get(operation="operation_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"operation": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(operation="operation_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/locations/global/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = GlobalOrganizationOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -669,26 +717,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.OperationList( id="id_value", - items=[compute.Operation(client_operation_id="client_operation_id_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -696,66 +742,48 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Operation(client_operation_id="client_operation_id_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) - -def test_list_rest_from_dict(): - test_list_rest(request_type=dict) - -def test_list_rest_flattened(): +def test_list_rest_bad_request( + transport: str = "rest", + request_type=compute.ListGlobalOrganizationOperationsRequest, +): client = GlobalOrganizationOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # Designate an appropriate value for the returned response. - return_value = compute.OperationList() + # send a request that will satisfy transcoding + request_init = {} + request = request_type(request_init) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() - response_value.status_code = 200 - response_value._content = json_return_value.encode("UTF-8") + response_value.status_code = 400 + response_value.request = Request() req.return_value = response_value + client.list(request) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list() - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - - -def test_list_rest_flattened_error(): - client = GlobalOrganizationOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list(compute.ListGlobalOrganizationOperationsRequest(),) +def test_list_rest_from_dict(): + test_list_rest(request_type=dict) -def test_list_pager(): +def test_list_rest_pager(): client = GlobalOrganizationOperationsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.OperationList( @@ -777,16 +805,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Operation) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -871,8 +898,10 @@ def test_global_organization_operations_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_global_organization_operations_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -896,29 +925,6 @@ def test_global_organization_operations_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_global_organization_operations_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.global_organization_operations.transports.GlobalOrganizationOperationsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GlobalOrganizationOperationsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_global_organization_operations_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -930,7 +936,6 @@ def test_global_organization_operations_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_global_organization_operations_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -946,21 +951,6 @@ def test_global_organization_operations_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_global_organization_operations_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GlobalOrganizationOperationsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_global_organization_operations_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1109,3 +1099,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = GlobalOrganizationOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py b/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py index ea04a4812..fa9210d61 100644 --- a/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py +++ b/tests/unit/gapic/compute_v1/test_global_public_delegated_prefixes.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.global_public_delegated_prefixes import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.global_public_delegated_prefixes import pagers from google.cloud.compute_v1.services.global_public_delegated_prefixes import transports -from google.cloud.compute_v1.services.global_public_delegated_prefixes.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -216,7 +200,7 @@ def test_global_public_delegated_prefixes_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -225,6 +209,7 @@ def test_global_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -232,7 +217,7 @@ def test_global_public_delegated_prefixes_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -241,6 +226,7 @@ def test_global_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -248,7 +234,7 @@ def test_global_public_delegated_prefixes_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -257,6 +243,7 @@ def test_global_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -276,7 +263,7 @@ def test_global_public_delegated_prefixes_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -285,6 +272,7 @@ def test_global_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -327,7 +315,7 @@ def test_global_public_delegated_prefixes_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = 
client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -344,6 +332,7 @@ def test_global_public_delegated_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -368,7 +357,7 @@ def test_global_public_delegated_prefixes_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +366,7 @@ def test_global_public_delegated_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -389,7 +379,7 @@ def test_global_public_delegated_prefixes_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -398,6 +388,7 @@ def test_global_public_delegated_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -418,7 +409,7 @@ def test_global_public_delegated_prefixes_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -427,6 +418,7 @@ def test_global_public_delegated_prefixes_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -447,7 +439,7 @@ def test_global_public_delegated_prefixes_client_client_options_credentials_file options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -456,6 +448,7 @@ def test_global_public_delegated_prefixes_client_client_options_credentials_file client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -467,9 +460,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -479,7 +472,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -497,14 +489,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -515,7 +506,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -533,19 +523,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteGlobalPublicDelegatedPrefixeRequest, +): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -554,32 +565,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_delegated_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", public_delegated_prefix="public_delegated_prefix_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "public_delegated_prefix_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -599,9 +616,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -616,20 +633,15 @@ def test_get_rest( kind="kind_value", name="name_value", parent_prefix="parent_prefix_value", - public_delegated_sub_prefixs=[ - compute.PublicDelegatedPrefixPublicDelegatedSubPrefix( - delegatee_project="delegatee_project_value" - ) - ], region="region_value", self_link="self_link_value", status=compute.PublicDelegatedPrefix.Status.ANNOUNCED, ) # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -645,23 +657,41 @@ def test_get_rest( assert response.kind == "kind_value" assert response.name == "name_value" assert response.parent_prefix == "parent_prefix_value" - assert response.public_delegated_sub_prefixs == [ - compute.PublicDelegatedPrefixPublicDelegatedSubPrefix( - delegatee_project="delegatee_project_value" - ) - ] assert response.region == "region_value" assert response.self_link == "self_link_value" assert response.status == compute.PublicDelegatedPrefix.Status.ANNOUNCED +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetGlobalPublicDelegatedPrefixeRequest +): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -670,32 +700,38 @@ def test_get_rest_flattened(): return_value = compute.PublicDelegatedPrefix() # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_delegated_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", public_delegated_prefix="public_delegated_prefix_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "public_delegated_prefix_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -716,9 +752,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -728,7 +767,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -746,14 +784,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -764,7 +801,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -782,19 +818,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertGlobalPublicDelegatedPrefixeRequest, +): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + 
creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -803,39 +863,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - public_delegated_prefix_resource = compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - public_delegated_prefix_resource=public_delegated_prefix_resource, + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.PublicDelegatedPrefix.to_json( - public_delegated_prefix_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -858,30 +919,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PublicDelegatedPrefixList( id="id_value", - items=[ - compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -889,22 +944,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.PublicDelegatedPrefix(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", + request_type=compute.ListGlobalPublicDelegatedPrefixesRequest, +): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -913,28 +988,35 @@ def test_list_rest_flattened(): return_value = compute.PublicDelegatedPrefixList() # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -945,13 +1027,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = GlobalPublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.PublicDelegatedPrefixList( @@ -984,16 +1068,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.PublicDelegatedPrefix) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1006,9 +1089,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1018,7 +1104,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1036,14 +1121,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1054,7 +1138,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1072,19 +1155,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", + request_type=compute.PatchGlobalPublicDelegatedPrefixeRequest, +): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_delegated_prefix": "sample2"} + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1093,41 +1200,41 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- public_delegated_prefix_resource = compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_delegated_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", public_delegated_prefix="public_delegated_prefix_value", - public_delegated_prefix_resource=public_delegated_prefix_resource, + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "public_delegated_prefix_value" in http_call[1] + str(body) + str(params) - assert compute.PublicDelegatedPrefix.to_json( - public_delegated_prefix_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = GlobalPublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1225,8 +1332,10 @@ def test_global_public_delegated_prefixes_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_global_public_delegated_prefixes_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1250,29 +1359,6 @@ def test_global_public_delegated_prefixes_base_transport_with_credentials_file() ) -@requires_google_auth_lt_1_25_0 -def test_global_public_delegated_prefixes_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.global_public_delegated_prefixes.transports.GlobalPublicDelegatedPrefixesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.GlobalPublicDelegatedPrefixesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_global_public_delegated_prefixes_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1284,7 +1370,6 @@ def test_global_public_delegated_prefixes_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_global_public_delegated_prefixes_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1300,21 +1385,6 @@ def test_global_public_delegated_prefixes_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_global_public_delegated_prefixes_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - GlobalPublicDelegatedPrefixesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_global_public_delegated_prefixes_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1463,3 +1533,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = GlobalPublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_health_checks.py b/tests/unit/gapic/compute_v1/test_health_checks.py index 99c41f498..7acaa8d9e 100644 --- a/tests/unit/gapic/compute_v1/test_health_checks.py +++ b/tests/unit/gapic/compute_v1/test_health_checks.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.health_checks import HealthChecksClient from google.cloud.compute_v1.services.health_checks import pagers from google.cloud.compute_v1.services.health_checks import transports -from google.cloud.compute_v1.services.health_checks.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_health_checks_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_health_checks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ 
-221,7 +207,7 @@ def test_health_checks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_health_checks_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_health_checks_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def test_health_checks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_health_checks_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_health_checks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -350,7 +340,7 @@ def test_health_checks_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_health_checks_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_health_checks_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_health_checks_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_health_checks_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_health_checks_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,31 +408,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.HealthChecksAggregatedList( id="id_value", - items={ - "key_value": compute.HealthChecksScopedList( - health_checks=[compute.HealthCheck(check_interval_sec=1884)] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.HealthChecksAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthChecksAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -447,24 +434,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.HealthChecksScopedList( - health_checks=[compute.HealthCheck(check_interval_sec=1884)] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListHealthChecksRequest +): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -472,27 +478,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.HealthChecksAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.HealthChecksAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthChecksAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/healthChecks" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -502,11 +517,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.HealthChecksAggregatedList( @@ -541,10 +558,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.HealthChecksScopedList) assert pager.get("h") is None @@ -562,7 +578,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.HealthChecksScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -574,9 +590,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -586,7 +602,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -604,14 +619,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -622,7 +636,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -640,18 +653,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteHealthCheckRequest +): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -659,30 +694,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", health_check="health_check_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "health_check": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", health_check="health_check_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -699,9 +740,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetHealthCheckRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -710,30 +751,21 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetHealthCheckRe check_interval_sec=1884, creation_timestamp="creation_timestamp_value", description="description_value", - grpc_health_check=compute.GRPCHealthCheck( - grpc_service_name="grpc_service_name_value" - ), healthy_threshold=1819, - http2_health_check=compute.HTTP2HealthCheck(host="host_value"), - http_health_check=compute.HTTPHealthCheck(host="host_value"), - https_health_check=compute.HTTPSHealthCheck(host="host_value"), id=205, kind="kind_value", - log_config=compute.HealthCheckLogConfig(enable=True), name="name_value", region="region_value", self_link="self_link_value", - ssl_health_check=compute.SSLHealthCheck(port=453), - tcp_health_check=compute.TCPHealthCheck(port=453), timeout_sec=1185, type_=compute.HealthCheck.Type.GRPC, unhealthy_threshold=2046, ) # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheck.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheck.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -743,32 +775,48 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetHealthCheckRe assert response.check_interval_sec == 1884 assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.grpc_health_check == compute.GRPCHealthCheck( - grpc_service_name="grpc_service_name_value" - ) assert response.healthy_threshold == 1819 - assert response.http2_health_check == compute.HTTP2HealthCheck(host="host_value") - assert response.http_health_check == compute.HTTPHealthCheck(host="host_value") - assert response.https_health_check == compute.HTTPSHealthCheck(host="host_value") assert response.id == 205 assert response.kind == "kind_value" - assert response.log_config == compute.HealthCheckLogConfig(enable=True) assert response.name == "name_value" assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.ssl_health_check == compute.SSLHealthCheck(port=453) - assert response.tcp_health_check == compute.TCPHealthCheck(port=453) assert response.timeout_sec == 1185 assert response.type_ == compute.HealthCheck.Type.GRPC assert response.unhealthy_threshold == 2046 +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetHealthCheckRequest +): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -776,30 +824,36 @@ def test_get_rest_flattened(): return_value = compute.HealthCheck() # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheck.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheck.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", health_check="health_check_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "health_check": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", health_check="health_check_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -818,9 +872,10 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -830,7 +885,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -848,14 +902,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -866,7 +919,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -884,18 +936,41 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertHealthCheckRequest +): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -903,35 +978,39 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- health_check_resource = compute.HealthCheck(check_interval_sec=1884) - client.insert( - project="project_value", health_check_resource=health_check_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + health_check_resource=compute.HealthCheck(check_interval_sec=1884), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheck.to_json( - health_check_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/healthChecks" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -950,26 +1029,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.HealthCheckList( id="id_value", - items=[compute.HealthCheck(check_interval_sec=1884)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -977,19 +1054,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.HealthCheck(check_interval_sec=1884)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListHealthChecksRequest +): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -997,27 +1097,36 @@ def test_list_rest_flattened(): return_value = compute.HealthCheckList() # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/healthChecks" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
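The flattened-call tests above no longer substring-match the outgoing URL; they check the request URI against the method's HTTP rule with google.api_core.path_template.validate. A small standalone sketch of that check (the compute.googleapis.com host and the sample values are illustrative):

from google.api_core import path_template

# URI template with {name} placeholders, mirroring the HTTP rule asserted above.
template = (
    "https://compute.googleapis.com/compute/v1/projects/{project}"
    "/global/healthChecks/{health_check}"
)

# validate() returns True when the concrete URI matches the template ...
assert path_template.validate(
    template,
    "https://compute.googleapis.com/compute/v1/projects/sample1"
    "/global/healthChecks/sample2",
)

# ... and False when a path segment is missing.
assert not path_template.validate(
    template,
    "https://compute.googleapis.com/compute/v1/projects/sample1/global/healthChecks",
)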
@@ -1027,11 +1136,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.HealthCheckList( @@ -1061,16 +1172,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.HealthCheck) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1082,9 +1192,10 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1094,7 +1205,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1112,14 +1222,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1130,7 +1239,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1148,18 +1256,41 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchHealthCheckRequest +): + client = HealthChecksClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1167,38 +1298,40 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - health_check_resource = compute.HealthCheck(check_interval_sec=1884) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "health_check": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", health_check="health_check_value", - health_check_resource=health_check_resource, + health_check_resource=compute.HealthCheck(check_interval_sec=1884), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheck.to_json( - health_check_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
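The rewritten happy-path tests build each request from a request_init dict rather than sending an empty message, so URL transcoding has concrete path fields to work with; proto-plus request classes accept a mapping as their first argument. A minimal sketch of that construction:

from google.cloud.compute_v1.types import compute

# proto-plus message types accept a mapping whose keys are field names,
# which is how the request_init dicts above become request objects.
request = compute.PatchHealthCheckRequest(
    {"project": "sample1", "health_check": "sample2"}
)

assert request.project == "sample1"
assert request.health_check == "sample2"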
@@ -1218,9 +1351,10 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1230,7 +1364,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1248,14 +1381,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1266,7 +1398,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1284,18 +1415,41 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateHealthCheckRequest +): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "health_check": "sample2"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1303,38 +1457,40 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - health_check_resource = compute.HealthCheck(check_interval_sec=1884) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "health_check": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", health_check="health_check_value", - health_check_resource=health_check_resource, + health_check_resource=compute.HealthCheck(check_interval_sec=1884), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheck.to_json( - health_check_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
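Each *_rest_flattened_error test pins down the standard GAPIC rule that a method takes either a request object or flattened keyword fields, never both. A hedged sketch of the client-side behaviour those tests rely on (values are illustrative; the ValueError is raised before any HTTP call is attempted):

import pytest

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.health_checks import HealthChecksClient
from google.cloud.compute_v1.types import compute

client = HealthChecksClient(credentials=ga_credentials.AnonymousCredentials())

# Supplying a request object together with flattened fields is rejected up front.
with pytest.raises(ValueError):
    client.update(
        compute.UpdateHealthCheckRequest(),
        project="project_value",
        health_check="health_check_value",
        health_check_resource=compute.HealthCheck(check_interval_sec=1884),
    )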
@@ -1429,8 +1585,10 @@ def test_health_checks_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_health_checks_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1454,29 +1612,6 @@ def test_health_checks_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_health_checks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.health_checks.transports.HealthChecksTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.HealthChecksTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_health_checks_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1488,7 +1623,6 @@ def test_health_checks_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_health_checks_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1504,21 +1638,6 @@ def test_health_checks_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_health_checks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - HealthChecksClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_health_checks_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1665,3 +1784,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = HealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_image_family_views.py b/tests/unit/gapic/compute_v1/test_image_family_views.py index 20138cd63..0b27d81e5 100644 --- a/tests/unit/gapic/compute_v1/test_image_family_views.py +++ b/tests/unit/gapic/compute_v1/test_image_family_views.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,32 +31,16 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.image_family_views import ImageFamilyViewsClient from google.cloud.compute_v1.services.image_family_views import transports -from google.cloud.compute_v1.services.image_family_views.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_image_family_views_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_image_family_views_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_image_family_views_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_image_family_views_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 
+213,7 @@ def test_image_family_views_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_image_family_views_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_image_family_views_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_image_family_views_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -306,7 +294,7 @@ def test_image_family_views_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -323,6 +311,7 @@ def test_image_family_views_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -347,7 +336,7 @@ def test_image_family_views_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -356,6 +345,7 @@ def test_image_family_views_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
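The test_transport_close and test_client_ctx additions above capture the new lifecycle behaviour: the REST transport now exposes close(), and the client can be used as a context manager that closes it on exit. A minimal usage sketch:

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.health_checks import HealthChecksClient

client = HealthChecksClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)

# Leaving the with-block calls transport.close(), which shuts down the
# underlying requests session.
with client:
    ...  # issue API calls here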
@@ -368,7 +358,7 @@ def test_image_family_views_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +367,7 @@ def test_image_family_views_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +382,7 @@ def test_image_family_views_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -400,6 +391,7 @@ def test_image_family_views_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -414,7 +406,7 @@ def test_image_family_views_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -423,6 +415,7 @@ def test_image_family_views_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -433,36 +426,58 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "family": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.ImageFamilyView( - image=compute.Image(archive_size_bytes=1922), - ) + return_value = compute.ImageFamilyView() # Wrap the value into a proper Response obj - json_return_value = compute.ImageFamilyView.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ImageFamilyView.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.ImageFamilyView) - assert response.image == compute.Image(archive_size_bytes=1922) + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetImageFamilyViewRequest +): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "family": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ImageFamilyViewsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -470,31 +485,38 @@ def test_get_rest_flattened(): return_value = compute.ImageFamilyView() # Wrap the value into a proper Response obj - json_return_value = compute.ImageFamilyView.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ImageFamilyView.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "family": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", family="family_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "family_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/imageFamilyViews/{family}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = ImageFamilyViewsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
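All of the happy-path REST tests share one arrangement: patch requests' Session.request, serialize the expected proto to JSON, and return it as the body of a fake 200 response. A condensed sketch of that pattern for the get call above (sample values are illustrative):

import mock
from requests import Response
from requests.sessions import Session

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.image_family_views import ImageFamilyViewsClient
from google.cloud.compute_v1.types import compute

client = ImageFamilyViewsClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)

with mock.patch.object(Session, "request") as req:
    # Serialize the message the API would return and attach it to a fake response.
    return_value = compute.ImageFamilyView()
    response_value = Response()
    response_value.status_code = 200
    response_value._content = compute.ImageFamilyView.to_json(return_value).encode(
        "UTF-8"
    )
    req.return_value = response_value

    response = client.get(
        request={"project": "sample1", "zone": "sample2", "family": "sample3"}
    )

assert isinstance(response, compute.ImageFamilyView)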
@@ -581,8 +603,10 @@ def test_image_family_views_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_image_family_views_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -607,30 +631,6 @@ def test_image_family_views_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_image_family_views_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.image_family_views.transports.ImageFamilyViewsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ImageFamilyViewsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_image_family_views_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -642,7 +642,6 @@ def test_image_family_views_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_image_family_views_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -659,22 +658,6 @@ def test_image_family_views_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_image_family_views_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ImageFamilyViewsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_image_family_views_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -821,3 +804,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ImageFamilyViewsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_images.py b/tests/unit/gapic/compute_v1/test_images.py index 11e00a377..cc799895e 100644 --- a/tests/unit/gapic/compute_v1/test_images.py +++ b/tests/unit/gapic/compute_v1/test_images.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,31 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.images import ImagesClient from google.cloud.compute_v1.services.images import pagers from google.cloud.compute_v1.services.images import transports -from google.cloud.compute_v1.services.images.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -180,7 +166,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -189,6 +175,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -196,7 +183,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +192,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the 
case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +200,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +209,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -240,7 +229,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -249,6 +238,7 @@ def test_images_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -279,7 +269,7 @@ def test_images_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -296,6 +286,7 @@ def test_images_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -320,7 +311,7 @@ def test_images_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -329,6 +320,7 @@ def test_images_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
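The client-option matrices here and above drive behaviour through environment variables such as GOOGLE_API_USE_MTLS_ENDPOINT; mock.patch.dict keeps each override scoped to its test. A tiny generic sketch of that pattern:

import os
import mock

# The override exists only inside the with-block; the original environment is
# restored on exit, so one test cannot leak settings into another.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
    assert os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] == "never"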
@@ -341,7 +333,7 @@ def test_images_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -350,6 +342,7 @@ def test_images_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -364,7 +357,7 @@ def test_images_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -373,6 +366,7 @@ def test_images_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -387,7 +381,7 @@ def test_images_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -396,6 +390,7 @@ def test_images_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -404,9 +399,9 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteImageRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -416,7 +411,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteImageRe creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -434,14 +428,13 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteImageRe target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -452,7 +445,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteImageRe assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -470,18 +462,40 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteImageRe assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -489,30 +503,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( - project="project_value", image="image_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "image": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", image="image_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "image_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{image}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -529,9 +549,12 @@ def test_deprecate_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request_init["deprecation_status_resource"] = compute.DeprecationStatus( + deleted="deleted_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -541,7 +564,6 @@ def test_deprecate_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -559,14 +581,13 @@ def test_deprecate_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.deprecate(request) @@ -577,7 +598,6 @@ def test_deprecate_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -595,18 +615,43 @@ def test_deprecate_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_deprecate_rest_bad_request( + transport: str = "rest", request_type=compute.DeprecateImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request_init["deprecation_status_resource"] = compute.DeprecationStatus( + deleted="deleted_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.deprecate(request) + + def test_deprecate_rest_from_dict(): test_deprecate_rest(request_type=dict) -def test_deprecate_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_deprecate_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -614,38 +659,42 @@ def test_deprecate_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
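# A sketch of the response-faking idiom repeated throughout these tests: a proto-plus
# message is serialised with to_json() and attached to a bare requests.Response so the
# mocked Session can hand it back to the REST transport. The Operation field value
# mirrors the tests above; the helper name is illustrative.
from requests import Response

from google.cloud.compute_v1.types import compute


def sketch_fake_operation_response():
    return_value = compute.Operation(client_operation_id="client_operation_id_value")

    # Wrap the value into a proper Response obj, exactly as the tests above do.
    json_return_value = compute.Operation.to_json(return_value)
    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")

    # The transport reads .content and decodes it back into an Operation.
    assert b"client_operation_id_value" in response_value.content
    return response_value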
- deprecation_status_resource = compute.DeprecationStatus(deleted="deleted_value") - client.deprecate( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "image": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", image="image_value", - deprecation_status_resource=deprecation_status_resource, + deprecation_status_resource=compute.DeprecationStatus( + deleted="deleted_value" + ), ) + mock_args.update(sample_request) + client.deprecate(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "image_value" in http_call[1] + str(body) + str(params) - assert compute.DeprecationStatus.to_json( - deprecation_status_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_deprecate_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{image}/deprecate" + % client.transport._host, + args[1], + ) + + +def test_deprecate_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -665,9 +714,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetImageRequest) credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -675,45 +724,22 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetImageRequest) return_value = compute.Image( archive_size_bytes=1922, creation_timestamp="creation_timestamp_value", - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", disk_size_gb=1261, family="family_value", - guest_os_features=[ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ], id=205, - image_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, license_codes=[1360], licenses=["licenses_value"], name="name_value", - raw_disk=compute.RawDisk(container_type=compute.RawDisk.ContainerType.TAR), satisfies_pzs=True, self_link="self_link_value", - shielded_instance_initial_state=compute.InitialStateConfig( - dbs=[compute.FileContentBuffer(content="content_value")] - ), source_disk="source_disk_value", - source_disk_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_disk_id="source_disk_id_value", source_image="source_image_value", - source_image_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_image_id="source_image_id_value", source_snapshot="source_snapshot_value", - source_snapshot_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_snapshot_id="source_snapshot_id_value", source_type=compute.Image.SourceType.RAW, status=compute.Image.Status.DELETING, @@ -721,9 +747,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetImageRequest) ) # Wrap the value into a proper Response obj - json_return_value = compute.Image.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Image.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -732,59 +758,59 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetImageRequest) assert isinstance(response, compute.Image) assert response.archive_size_bytes == 1922 assert response.creation_timestamp == "creation_timestamp_value" - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.disk_size_gb == 1261 assert response.family == "family_value" - assert response.guest_os_features == [ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ] assert response.id == 205 - assert response.image_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.license_codes == [1360] assert response.licenses == ["licenses_value"] assert response.name == "name_value" - assert response.raw_disk == compute.RawDisk( - container_type=compute.RawDisk.ContainerType.TAR - ) assert response.satisfies_pzs is True assert response.self_link == "self_link_value" - assert response.shielded_instance_initial_state == compute.InitialStateConfig( - dbs=[compute.FileContentBuffer(content="content_value")] - ) assert response.source_disk == "source_disk_value" - assert 
response.source_disk_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_disk_id == "source_disk_id_value" assert response.source_image == "source_image_value" - assert response.source_image_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_image_id == "source_image_id_value" assert response.source_snapshot == "source_snapshot_value" - assert response.source_snapshot_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_snapshot_id == "source_snapshot_id_value" assert response.source_type == compute.Image.SourceType.RAW assert response.status == compute.Image.Status.DELETING assert response.storage_locations == ["storage_locations_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -792,30 +818,36 @@ def test_get_rest_flattened(): return_value = compute.Image() # Wrap the value into a proper Response obj - json_return_value = compute.Image.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Image.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", image="image_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "image": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", image="image_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "image_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{image}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -832,9 +864,9 @@ def test_get_from_family_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "family": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -842,45 +874,22 @@ def test_get_from_family_rest( return_value = compute.Image( archive_size_bytes=1922, creation_timestamp="creation_timestamp_value", - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", disk_size_gb=1261, family="family_value", - guest_os_features=[ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ], id=205, - image_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, license_codes=[1360], licenses=["licenses_value"], name="name_value", - raw_disk=compute.RawDisk(container_type=compute.RawDisk.ContainerType.TAR), satisfies_pzs=True, self_link="self_link_value", - shielded_instance_initial_state=compute.InitialStateConfig( - dbs=[compute.FileContentBuffer(content="content_value")] - ), source_disk="source_disk_value", - source_disk_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_disk_id="source_disk_id_value", source_image="source_image_value", - source_image_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_image_id="source_image_id_value", source_snapshot="source_snapshot_value", - source_snapshot_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_snapshot_id="source_snapshot_id_value", source_type=compute.Image.SourceType.RAW, status=compute.Image.Status.DELETING, @@ -888,9 +897,9 @@ def test_get_from_family_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.Image.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Image.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_from_family(request) @@ -899,59 +908,59 @@ def test_get_from_family_rest( assert isinstance(response, compute.Image) assert response.archive_size_bytes == 1922 assert response.creation_timestamp == 
"creation_timestamp_value" - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.disk_size_gb == 1261 assert response.family == "family_value" - assert response.guest_os_features == [ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ] assert response.id == 205 - assert response.image_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.license_codes == [1360] assert response.licenses == ["licenses_value"] assert response.name == "name_value" - assert response.raw_disk == compute.RawDisk( - container_type=compute.RawDisk.ContainerType.TAR - ) assert response.satisfies_pzs is True assert response.self_link == "self_link_value" - assert response.shielded_instance_initial_state == compute.InitialStateConfig( - dbs=[compute.FileContentBuffer(content="content_value")] - ) assert response.source_disk == "source_disk_value" - assert response.source_disk_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_disk_id == "source_disk_id_value" assert response.source_image == "source_image_value" - assert response.source_image_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_image_id == "source_image_id_value" assert response.source_snapshot == "source_snapshot_value" - assert response.source_snapshot_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_snapshot_id == "source_snapshot_id_value" assert response.source_type == compute.Image.SourceType.RAW assert response.status == compute.Image.Status.DELETING assert response.storage_locations == ["storage_locations_value"] +def test_get_from_family_rest_bad_request( + transport: str = "rest", request_type=compute.GetFromFamilyImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "family": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_from_family(request) + + def test_get_from_family_rest_from_dict(): test_get_from_family_rest(request_type=dict) -def test_get_from_family_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_from_family_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -959,30 +968,36 @@ def test_get_from_family_rest_flattened(): return_value = compute.Image() # Wrap the value into a proper Response obj - json_return_value = compute.Image.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Image.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_from_family( - project="project_value", family="family_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "family": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", family="family_value",) + mock_args.update(sample_request) + client.get_from_family(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "family_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/family/{family}" + % client.transport._host, + args[1], + ) -def test_get_from_family_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_from_family_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1001,60 +1016,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1062,30 +1078,36 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - project="project_value", resource="resource_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", resource="resource_value",) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
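# A tiny sketch of the URI assertion that replaces the old substring checks in the
# *_rest_flattened tests: path_template.validate() matches the transcoded request URL
# against the method's http-rule template. The host below is only an example value;
# the tests themselves substitute client.transport._host.
from google.api_core import path_template


def sketch_uri_template_check():
    host = "compute.googleapis.com"  # assumed example host
    template = (
        "https://%s/compute/v1/projects/{project}/global/images/{resource}/getIamPolicy"
        % host
    )
    uri = (
        "https://compute.googleapis.com/compute/v1"
        "/projects/sample1/global/images/sample2/getIamPolicy"
    )
    assert path_template.validate(template, uri)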
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1102,9 +1124,10 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertImageRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["image_resource"] = compute.Image(archive_size_bytes=1922) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1114,7 +1137,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertImageRe creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1132,14 +1154,13 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertImageRe target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1150,7 +1171,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertImageRe assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1168,18 +1188,41 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertImageRe assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertImageRequest +): + client = ImagesClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["image_resource"] = compute.Image(archive_size_bytes=1922) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1187,35 +1230,39 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - image_resource = compute.Image(archive_size_bytes=1922) - client.insert( - project="project_value", image_resource=image_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + image_resource=compute.Image(archive_size_bytes=1922), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.Image.to_json( - image_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1232,26 +1279,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListImagesReque credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ImageList( id="id_value", - items=[compute.Image(archive_size_bytes=1922)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ImageList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ImageList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1259,19 +1304,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListImagesReque # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.Image(archive_size_bytes=1922)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListImagesRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1279,27 +1347,36 @@ def test_list_rest_flattened(): return_value = compute.ImageList() # Wrap the value into a proper Response obj - json_return_value = compute.ImageList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ImageList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1309,11 +1386,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ImageList( @@ -1335,16 +1414,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Image) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1354,9 +1432,10 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchImageRequ credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request_init["image_resource"] = compute.Image(archive_size_bytes=1922) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
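# A condensed sketch of the REST pager flow exercised by test_list_rest_pager above:
# the mocked Session returns two ImageList pages and the pager walks them until
# next_page_token is empty. Page contents and item counts are illustrative.
from unittest import mock

from requests import Response
from requests.sessions import Session

from google.auth import credentials as ga_credentials
from google.cloud import compute_v1
from google.cloud.compute_v1.types import compute


def sketch_list_pager():
    client = compute_v1.ImagesClient(credentials=ga_credentials.AnonymousCredentials())

    pages = (
        compute.ImageList(
            items=[compute.Image(), compute.Image()], next_page_token="abc"
        ),
        compute.ImageList(items=[compute.Image()], next_page_token=""),
    )

    with mock.patch.object(Session, "request") as req:
        # One fake HTTP response per page, each carrying the serialised ImageList.
        responses = []
        for page in pages:
            response_value = Response()
            response_value.status_code = 200
            response_value._content = compute.ImageList.to_json(page).encode("UTF-8")
            responses.append(response_value)
        req.side_effect = responses

        pager = client.list(request={"project": "sample1"})
        # Two items on the first page plus one on the second.
        assert len(list(pager)) == 3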
with mock.patch.object(Session, "request") as req: @@ -1366,7 +1445,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchImageRequ creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1384,14 +1462,13 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchImageRequ target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1402,7 +1479,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchImageRequ assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1420,18 +1496,41 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchImageRequ assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "image": "sample2"} + request_init["image_resource"] = compute.Image(archive_size_bytes=1922) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1439,36 +1538,40 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - image_resource = compute.Image(archive_size_bytes=1922) - client.patch( - project="project_value", image="image_value", image_resource=image_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "image": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + image="image_value", + image_resource=compute.Image(archive_size_bytes=1922), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "image_value" in http_call[1] + str(body) + str(params) - assert compute.Image.to_json( - image_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{image}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1488,60 +1591,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1549,40 +1659,42 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- global_set_policy_request_resource = compute.GlobalSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_policy_request_resource=global_set_policy_request_resource, + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetPolicyRequest.to_json( - global_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1604,9 +1716,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
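# A sketch of the *_rest_flattened_error checks referenced above, assuming the usual
# generated-client behaviour: supplying a request object together with flattened
# keyword fields is rejected with ValueError. The exact exception type is an
# assumption; the field values mirror the tests.
import pytest

from google.auth import credentials as ga_credentials
from google.cloud import compute_v1


def sketch_set_labels_flattened_error():
    client = compute_v1.ImagesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    with pytest.raises(ValueError):
        client.set_labels(
            compute_v1.SetLabelsImageRequest(),
            project="project_value",
            resource="resource_value",
            global_set_labels_request_resource=compute_v1.GlobalSetLabelsRequest(
                label_fingerprint="label_fingerprint_value"
            ),
        )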
with mock.patch.object(Session, "request") as req: @@ -1616,7 +1731,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1634,14 +1748,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1652,7 +1765,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1670,18 +1782,43 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_labels_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1689,40 +1826,42 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_set_labels_request_resource = compute.GlobalSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_labels_request_resource=global_set_labels_request_resource, + global_set_labels_request_resource=compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetLabelsRequest.to_json( - global_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1744,9 +1883,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1756,9 +1898,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1768,12 +1910,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsImageRequest +): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1781,40 +1951,42 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = ImagesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/images/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1915,8 +2087,10 @@ def test_images_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_images_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1940,29 +2114,6 @@ def test_images_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_images_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.images.transports.ImagesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ImagesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_images_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1974,7 +2125,6 @@ def test_images_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_images_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1990,21 +2140,6 @@ def test_images_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_images_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
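
The flattened-call assertions in this file now validate the request URL against a URI template instead of searching for substrings in the request body and params. A minimal illustrative sketch of that helper, with a made-up host and made-up path values (not taken from this diff):

from google.api_core import path_template

# validate() returns True when the URL matches the template; each {placeholder}
# stands in for a single path segment.
assert path_template.validate(
    "https://compute.googleapis.com/compute/v1/projects/{project}/global/images/{resource}/setLabels",
    "https://compute.googleapis.com/compute/v1/projects/sample1/global/images/sample2/setLabels",
)
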
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ImagesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_images_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2151,3 +2286,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ImagesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/tests/unit/gapic/compute_v1/test_instance_group_managers.py index d9660c0e9..9ab12f619 100644 --- a/tests/unit/gapic/compute_v1/test_instance_group_managers.py +++ b/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.instance_group_managers import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.instance_group_managers import pagers from google.cloud.compute_v1.services.instance_group_managers import transports -from google.cloud.compute_v1.services.instance_group_managers.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
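
The new test_transport_close and test_client_ctx cases above pin down the client's context-manager behaviour. A short usage sketch assuming only what those tests assert (the credentials and the no-op body are illustrative):

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.images import ImagesClient

# Exiting the `with` block closes the REST transport's underlying requests session.
with ImagesClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
) as client:
    pass  # no RPCs issued; client.transport._session is closed on exit
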
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_instance_group_managers_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_instance_group_managers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_instance_group_managers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_instance_group_managers_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_instance_group_managers_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == 
"false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_instance_group_managers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_instance_group_managers_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_instance_group_managers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_instance_group_managers_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_instance_group_managers_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_instance_group_managers_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_instance_group_managers_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_instance_group_managers_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_instance_group_managers_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -455,9 +448,18 @@ def test_abandon_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_abandon_instances_request_resource" + ] = compute.InstanceGroupManagersAbandonInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -467,7 +469,6 @@ def test_abandon_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -485,14 +486,13 @@ def test_abandon_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.abandon_instances(request) @@ -503,7 +503,6 @@ def test_abandon_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -521,19 +520,49 @@ def test_abandon_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_abandon_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.AbandonInstancesInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_abandon_instances_request_resource" + ] = compute.InstanceGroupManagersAbandonInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
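
These tests seed each request by passing a plain dict of path fields to the request class and then attaching the typed resource message. A minimal sketch of that proto-plus construction, reusing the same sample values (illustrative, not part of the diff):

from google.cloud.compute_v1.types import compute

# proto-plus request types accept a mapping as their first argument, so the
# transcoding-relevant path fields can be populated from a dict.
request = compute.AbandonInstancesInstanceGroupManagerRequest(
    {"project": "sample1", "zone": "sample2", "instance_group_manager": "sample3"}
)
request.instance_group_managers_abandon_instances_request_resource = (
    compute.InstanceGroupManagersAbandonInstancesRequest(instances=["instances_value"])
)
assert request.project == "sample1"
assert request.zone == "sample2"
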
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.abandon_instances(request) + + def test_abandon_instances_rest_from_dict(): test_abandon_instances_rest(request_type=dict) -def test_abandon_instances_rest_flattened(): +def test_abandon_instances_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -542,43 +571,46 @@ def test_abandon_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_abandon_instances_request_resource = compute.InstanceGroupManagersAbandonInstancesRequest( - instances=["instances_value"] - ) - client.abandon_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_abandon_instances_request_resource=instance_group_managers_abandon_instances_request_resource, + instance_group_managers_abandon_instances_request_resource=compute.InstanceGroupManagersAbandonInstancesRequest( + instances=["instances_value"] + ), ) + mock_args.update(sample_request) + client.abandon_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersAbandonInstancesRequest.to_json( - instance_group_managers_abandon_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/abandonInstances" + % client.transport._host, + args[1], + ) -def test_abandon_instances_rest_flattened_error(): +def test_abandon_instances_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -603,41 +635,27 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InstanceGroupManagerAggregatedList( id="id_value", - items={ - "key_value": compute.InstanceGroupManagersScopedList( - instance_group_managers=[ - compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagerAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -645,33 +663,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.InstanceGroupManagersScopedList( - instance_group_managers=[ - compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", + request_type=compute.AggregatedListInstanceGroupManagersRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): +def test_aggregated_list_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -680,30 +708,37 @@ def test_aggregated_list_rest_flattened(): return_value = compute.InstanceGroupManagerAggregatedList() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagerAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/instanceGroupManagers" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -715,13 +750,15 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = InstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupManagerAggregatedList( @@ -759,10 +796,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.InstanceGroupManagersScopedList) assert pager.get("h") is None @@ -780,7 +816,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.InstanceGroupManagersScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -793,9 +829,16 @@ def test_apply_updates_to_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_apply_updates_request_resource" + ] = compute.InstanceGroupManagersApplyUpdatesRequest(all_instances=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -805,7 +848,6 @@ def test_apply_updates_to_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -823,14 +865,13 @@ def test_apply_updates_to_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.apply_updates_to_instances(request) @@ -841,7 +882,6 @@ def test_apply_updates_to_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -859,19 +899,47 @@ def test_apply_updates_to_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_apply_updates_to_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.ApplyUpdatesToInstancesInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_apply_updates_request_resource" + ] = compute.InstanceGroupManagersApplyUpdatesRequest(all_instances=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_updates_to_instances(request) + + def test_apply_updates_to_instances_rest_from_dict(): test_apply_updates_to_instances_rest(request_type=dict) -def test_apply_updates_to_instances_rest_flattened(): +def test_apply_updates_to_instances_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -880,43 +948,46 @@ def test_apply_updates_to_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_apply_updates_request_resource = compute.InstanceGroupManagersApplyUpdatesRequest( - all_instances=True - ) - client.apply_updates_to_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_apply_updates_request_resource=instance_group_managers_apply_updates_request_resource, + instance_group_managers_apply_updates_request_resource=compute.InstanceGroupManagersApplyUpdatesRequest( + all_instances=True + ), ) + mock_args.update(sample_request) + client.apply_updates_to_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersApplyUpdatesRequest.to_json( - instance_group_managers_apply_updates_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances" + % client.transport._host, + args[1], + ) -def test_apply_updates_to_instances_rest_flattened_error(): +def test_apply_updates_to_instances_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -941,9 +1012,18 @@ def test_create_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_create_instances_request_resource" + ] = compute.InstanceGroupManagersCreateInstancesRequest( + instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -953,7 +1033,6 @@ def test_create_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -971,14 +1050,13 @@ def test_create_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instances(request) @@ -989,7 +1067,6 @@ def test_create_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1007,19 +1084,49 @@ def test_create_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_create_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.CreateInstancesInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_create_instances_request_resource" + ] = compute.InstanceGroupManagersCreateInstancesRequest( + instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instances(request) + + def test_create_instances_rest_from_dict(): test_create_instances_rest(request_type=dict) -def test_create_instances_rest_flattened(): +def test_create_instances_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1028,43 +1135,46 @@ def test_create_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_create_instances_request_resource = compute.InstanceGroupManagersCreateInstancesRequest( - instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] - ) - client.create_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_create_instances_request_resource=instance_group_managers_create_instances_request_resource, + instance_group_managers_create_instances_request_resource=compute.InstanceGroupManagersCreateInstancesRequest( + instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] + ), ) + mock_args.update(sample_request) + client.create_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersCreateInstancesRequest.to_json( - instance_group_managers_create_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/createInstances" + % client.transport._host, + args[1], + ) -def test_create_instances_rest_flattened_error(): +def test_create_instances_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1088,9 +1198,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1100,7 +1214,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1118,14 +1231,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -1136,7 +1248,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1154,19 +1265,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteInstanceGroupManagerRequest +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1175,34 +1310,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1224,9 +1368,18 @@ def test_delete_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_delete_instances_request_resource" + ] = compute.InstanceGroupManagersDeleteInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1236,7 +1389,6 @@ def test_delete_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1254,14 +1406,13 @@ def test_delete_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instances(request) @@ -1272,7 +1423,6 @@ def test_delete_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1290,19 +1440,49 @@ def test_delete_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteInstancesInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_delete_instances_request_resource" + ] = compute.InstanceGroupManagersDeleteInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instances(request) + + def test_delete_instances_rest_from_dict(): test_delete_instances_rest(request_type=dict) -def test_delete_instances_rest_flattened(): +def test_delete_instances_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1311,43 +1491,46 @@ def test_delete_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_delete_instances_request_resource = compute.InstanceGroupManagersDeleteInstancesRequest( - instances=["instances_value"] - ) - client.delete_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_delete_instances_request_resource=instance_group_managers_delete_instances_request_resource, + instance_group_managers_delete_instances_request_resource=compute.InstanceGroupManagersDeleteInstancesRequest( + instances=["instances_value"] + ), ) + mock_args.update(sample_request) + client.delete_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersDeleteInstancesRequest.to_json( - instance_group_managers_delete_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deleteInstances" + % client.transport._host, + args[1], + ) -def test_delete_instances_rest_flattened_error(): +def test_delete_instances_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1372,9 +1555,16 @@ def test_delete_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ] = compute.InstanceGroupManagersDeletePerInstanceConfigsReq(names=["names_value"]) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1384,7 +1574,6 @@ def test_delete_per_instance_configs_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1402,14 +1591,13 @@ def test_delete_per_instance_configs_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_per_instance_configs(request) @@ -1420,7 +1608,6 @@ def test_delete_per_instance_configs_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1438,19 +1625,47 @@ def test_delete_per_instance_configs_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.DeletePerInstanceConfigsInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_delete_per_instance_configs_req_resource" + ] = compute.InstanceGroupManagersDeletePerInstanceConfigsReq(names=["names_value"]) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_per_instance_configs(request) + + def test_delete_per_instance_configs_rest_from_dict(): test_delete_per_instance_configs_rest(request_type=dict) -def test_delete_per_instance_configs_rest_flattened(): +def test_delete_per_instance_configs_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1459,43 +1674,46 @@ def test_delete_per_instance_configs_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_delete_per_instance_configs_req_resource = compute.InstanceGroupManagersDeletePerInstanceConfigsReq( - names=["names_value"] - ) - client.delete_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_delete_per_instance_configs_req_resource=instance_group_managers_delete_per_instance_configs_req_resource, + instance_group_managers_delete_per_instance_configs_req_resource=compute.InstanceGroupManagersDeletePerInstanceConfigsReq( + names=["names_value"] + ), ) + mock_args.update(sample_request) + client.delete_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersDeletePerInstanceConfigsReq.to_json( - instance_group_managers_delete_per_instance_configs_req_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_delete_per_instance_configs_rest_flattened_error(): +def test_delete_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1519,119 +1737,94 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ], base_instance_name="base_instance_name_value", creation_timestamp="creation_timestamp_value", - current_actions=compute.InstanceGroupManagerActionsSummary(abandoning=1041), description="description_value", - distribution_policy=compute.DistributionPolicy( - target_shape=compute.DistributionPolicy.TargetShape.ANY - ), fingerprint="fingerprint_value", id=205, instance_group="instance_group_value", instance_template="instance_template_value", kind="kind_value", name="name_value", - named_ports=[compute.NamedPort(name="name_value")], region="region_value", self_link="self_link_value", - stateful_policy=compute.StatefulPolicy( - preserved_state=compute.StatefulPolicyPreservedState( - disks={ - "key_value": compute.StatefulPolicyPreservedStateDiskDevice( - auto_delete=compute.StatefulPolicyPreservedStateDiskDevice.AutoDelete.NEVER - ) - } - ) - ), - status=compute.InstanceGroupManagerStatus(autoscaler="autoscaler_value"), target_pools=["target_pools_value"], target_size=1185, - update_policy=compute.InstanceGroupManagerUpdatePolicy( - instance_redistribution_type="instance_redistribution_type_value" - ), - versions=[ - compute.InstanceGroupManagerVersion( - instance_template="instance_template_value" - ) - ], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupManager.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupManager.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.InstanceGroupManager) - assert response.auto_healing_policies == [ - compute.InstanceGroupManagerAutoHealingPolicy(health_check="health_check_value") - ] assert response.base_instance_name == "base_instance_name_value" assert response.creation_timestamp == "creation_timestamp_value" - assert response.current_actions == compute.InstanceGroupManagerActionsSummary( - abandoning=1041 - ) assert response.description == "description_value" - assert response.distribution_policy == compute.DistributionPolicy( - target_shape=compute.DistributionPolicy.TargetShape.ANY - ) assert response.fingerprint == "fingerprint_value" assert response.id == 205 assert response.instance_group == "instance_group_value" assert response.instance_template == "instance_template_value" assert response.kind == "kind_value" assert response.name == "name_value" - assert response.named_ports == [compute.NamedPort(name="name_value")] assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.stateful_policy == compute.StatefulPolicy( - preserved_state=compute.StatefulPolicyPreservedState( - disks={ - "key_value": compute.StatefulPolicyPreservedStateDiskDevice( - auto_delete=compute.StatefulPolicyPreservedStateDiskDevice.AutoDelete.NEVER - ) - } - ) - ) - assert response.status == compute.InstanceGroupManagerStatus( - autoscaler="autoscaler_value" - ) assert response.target_pools == ["target_pools_value"] assert response.target_size == 1185 - assert response.update_policy == compute.InstanceGroupManagerUpdatePolicy( - instance_redistribution_type="instance_redistribution_type_value" - ) - assert response.versions == [ - compute.InstanceGroupManagerVersion(instance_template="instance_template_value") - ] assert response.zone == "zone_value" -def test_get_rest_from_dict(): +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInstanceGroupManagerRequest +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + +def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
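The reordered success-path hunks all fake the HTTP reply the same way: the expected proto-plus message is serialized with to_json() and placed in Response._content so the REST transport can parse it back. A minimal sketch of that setup, under the same import assumptions as the sketch above:

from unittest import mock

from requests import Response, Session

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.instance_group_managers import (
    InstanceGroupManagersClient,
)
from google.cloud.compute_v1.types import compute


def test_rest_fake_response_pattern():
    client = InstanceGroupManagersClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = compute.GetInstanceGroupManagerRequest(
        {"project": "sample1", "zone": "sample2", "instance_group_manager": "sample3"}
    )
    with mock.patch.object(Session, "request") as req:
        return_value = compute.InstanceGroupManager(name="name_value")
        # Serialize the message and hand it back as the HTTP response body.
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.InstanceGroupManager.to_json(
            return_value
        ).encode("UTF-8")
        req.return_value = response_value
        response = client.get(request)
    assert isinstance(response, compute.InstanceGroupManager)
    assert response.name == "name_value"
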
@@ -1640,34 +1833,43 @@ def test_get_rest_flattened(): return_value = compute.InstanceGroupManager() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupManager.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupManager.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1688,9 +1890,16 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1700,7 +1909,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1718,14 +1926,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1736,7 +1943,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1754,19 +1960,46 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertInstanceGroupManagerRequest +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
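In the reworked *_rest_flattened tests, the old substring checks against the request body and query string are replaced by a single URL check: args[1] of the mocked Session.request call is validated against the method's HTTP rule with path_template.validate. In isolation the check behaves like this (the host below is a stand-in for client.transport._host):

from google.api_core import path_template

host = "example.googleapis.com"  # stand-in for client.transport._host
template = (
    "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers"
    % host
)
url = (
    "https://%s/compute/v1/projects/sample1/zones/sample2/instanceGroupManagers"
    % host
)
# Each {var} segment matches exactly one URL path segment.
assert path_template.validate(template, url)
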
@@ -1775,45 +2008,45 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_manager_resource = compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - instance_group_manager_resource=instance_group_manager_resource, + instance_group_manager_resource=compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManager.to_json( - instance_group_manager_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers" + % client.transport._host, + args[1], + ) -def test_insert_rest_flattened_error(): +def test_insert_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1840,34 +2073,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceGroupManagerList( id="id_value", - items=[ - compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupManagerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupManagerList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1875,28 +2098,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListInstanceGroupManagersRequest +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1905,31 +2141,35 @@ def test_list_rest_flattened(): return_value = compute.InstanceGroupManagerList() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupManagerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupManagerList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1942,13 +2182,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = InstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupManagerList( @@ -1978,16 +2220,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceGroupManager) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1999,53 +2240,70 @@ def test_list_errors_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceGroupManagersListErrorsResponse( - items=[ - compute.InstanceManagedByIgmError( - error=compute.InstanceManagedByIgmErrorManagedInstanceError( - code="code_value" - ) - ) - ], next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagersListErrorsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_errors(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListErrorsPager) - assert response.items == [ - compute.InstanceManagedByIgmError( - error=compute.InstanceManagedByIgmErrorManagedInstanceError( - code="code_value" - ) - ) - ] assert response.next_page_token == "next_page_token_value" +def test_list_errors_rest_bad_request( + transport: str = "rest", request_type=compute.ListErrorsInstanceGroupManagersRequest +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_errors(request) + + def test_list_errors_rest_from_dict(): test_list_errors_rest(request_type=dict) -def test_list_errors_rest_flattened(): +def test_list_errors_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2054,36 +2312,45 @@ def test_list_errors_rest_flattened(): return_value = compute.InstanceGroupManagersListErrorsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagersListErrorsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_errors( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.list_errors(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listErrors" + % client.transport._host, + args[1], + ) -def test_list_errors_rest_flattened_error(): +def test_list_errors_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2097,13 +2364,15 @@ def test_list_errors_rest_flattened_error(): ) -def test_list_errors_pager(): +def test_list_errors_rest_pager(): client = InstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupManagersListErrorsResponse( @@ -2140,16 +2409,19 @@ def test_list_errors_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_errors(request={}) + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } - assert pager._metadata == metadata + pager = client.list_errors(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceManagedByIgmError) for i in results) - pages = list(client.list_errors(request={}).pages) + pages = list(client.list_errors(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2162,49 +2434,71 @@ def test_list_managed_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceGroupManagersListManagedInstancesResponse( - managed_instances=[ - compute.ManagedInstance( - current_action=compute.ManagedInstance.CurrentAction.ABANDONING - ) - ], next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagersListManagedInstancesResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_managed_instances(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListManagedInstancesPager) - assert response.managed_instances == [ - compute.ManagedInstance( - current_action=compute.ManagedInstance.CurrentAction.ABANDONING - ) - ] assert response.next_page_token == "next_page_token_value" +def test_list_managed_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.ListManagedInstancesInstanceGroupManagersRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_managed_instances(request) + + def test_list_managed_instances_rest_from_dict(): test_list_managed_instances_rest(request_type=dict) -def test_list_managed_instances_rest_flattened(): +def test_list_managed_instances_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2213,36 +2507,45 @@ def test_list_managed_instances_rest_flattened(): return_value = compute.InstanceGroupManagersListManagedInstancesResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagersListManagedInstancesResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_managed_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.list_managed_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listManagedInstances" + % client.transport._host, + args[1], + ) -def test_list_managed_instances_rest_flattened_error(): +def test_list_managed_instances_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2256,13 +2559,15 @@ def test_list_managed_instances_rest_flattened_error(): ) -def test_list_managed_instances_pager(): +def test_list_managed_instances_rest_pager(): client = InstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupManagersListManagedInstancesResponse( @@ -2300,16 +2605,19 @@ def test_list_managed_instances_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_managed_instances(request={}) + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } - assert pager._metadata == metadata + pager = client.list_managed_instances(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ManagedInstance) for i in results) - pages = list(client.list_managed_instances(request={}).pages) + pages = list(client.list_managed_instances(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2322,45 +2630,71 @@ def test_list_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp( - items=[compute.PerInstanceConfig(fingerprint="fingerprint_value")], next_page_token="next_page_token_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_per_instance_configs(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPerInstanceConfigsPager) - assert response.items == [ - compute.PerInstanceConfig(fingerprint="fingerprint_value") - ] assert response.next_page_token == "next_page_token_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.ListPerInstanceConfigsInstanceGroupManagersRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_per_instance_configs(request) def test_list_per_instance_configs_rest_from_dict(): test_list_per_instance_configs_rest(request_type=dict) -def test_list_per_instance_configs_rest_flattened(): +def test_list_per_instance_configs_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2369,36 +2703,45 @@ def test_list_per_instance_configs_rest_flattened(): return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstanceGroupManagersListPerInstanceConfigsResp.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.list_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_list_per_instance_configs_rest_flattened_error(): +def test_list_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2412,13 +2755,15 @@ def test_list_per_instance_configs_rest_flattened_error(): ) -def test_list_per_instance_configs_pager(): +def test_list_per_instance_configs_rest_pager(): client = InstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupManagersListPerInstanceConfigsResp( @@ -2453,16 +2798,19 @@ def test_list_per_instance_configs_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_per_instance_configs(request={}) + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } - assert pager._metadata == metadata + pager = client.list_per_instance_configs(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.PerInstanceConfig) for i in results) - pages = list(client.list_per_instance_configs(request={}).pages) + pages = list(client.list_per_instance_configs(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2474,9 +2822,20 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2486,7 +2845,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2504,14 +2862,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -2522,7 +2879,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2540,19 +2896,50 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchInstanceGroupManagerRequest +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2561,47 +2948,50 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_manager_resource = compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_manager_resource=instance_group_manager_resource, + instance_group_manager_resource=compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManager.to_json( - instance_group_manager_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}" + % client.transport._host, + args[1], + ) -def test_patch_rest_flattened_error(): +def test_patch_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2630,9 +3020,20 @@ def test_patch_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ] = compute.InstanceGroupManagersPatchPerInstanceConfigsReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2642,7 +3043,6 @@ def test_patch_per_instance_configs_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2660,14 +3060,13 @@ def test_patch_per_instance_configs_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch_per_instance_configs(request) @@ -2678,7 +3077,6 @@ def test_patch_per_instance_configs_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2696,19 +3094,51 @@ def test_patch_per_instance_configs_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.PatchPerInstanceConfigsInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_patch_per_instance_configs_req_resource" + ] = compute.InstanceGroupManagersPatchPerInstanceConfigsReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_per_instance_configs(request) + + def test_patch_per_instance_configs_rest_from_dict(): test_patch_per_instance_configs_rest(request_type=dict) -def test_patch_per_instance_configs_rest_flattened(): +def test_patch_per_instance_configs_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
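The flattened-call hunks that follow merge the explicit path params (sample_request) into the keyword arguments before invoking the method, then validate the resulting URL. Condensed, the pattern looks like this (same import assumptions as the earlier sketches):

from unittest import mock

from requests import Response, Session

from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.instance_group_managers import (
    InstanceGroupManagersClient,
)
from google.cloud.compute_v1.types import compute


def test_rest_flattened_call_pattern():
    client = InstanceGroupManagersClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    with mock.patch.object(Session, "request") as req:
        response_value = Response()
        response_value.status_code = 200
        response_value._content = compute.InstanceGroupManagerList.to_json(
            compute.InstanceGroupManagerList()
        ).encode("UTF-8")
        req.return_value = response_value

        # Values that satisfy the HTTP rule override the placeholder flattened values.
        sample_request = {"project": "sample1", "zone": "sample2"}
        mock_args = dict(project="project_value", zone="zone_value")
        mock_args.update(sample_request)
        client.list(**mock_args)

        _, args, _ = req.mock_calls[0]
        assert path_template.validate(
            "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers"
            % client.transport._host,
            args[1],
        )
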
@@ -2717,45 +3147,48 @@ def test_patch_per_instance_configs_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_patch_per_instance_configs_req_resource = compute.InstanceGroupManagersPatchPerInstanceConfigsReq( - per_instance_configs=[ - compute.PerInstanceConfig(fingerprint="fingerprint_value") - ] - ) - client.patch_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_patch_per_instance_configs_req_resource=instance_group_managers_patch_per_instance_configs_req_resource, + instance_group_managers_patch_per_instance_configs_req_resource=compute.InstanceGroupManagersPatchPerInstanceConfigsReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ), ) + mock_args.update(sample_request) + client.patch_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersPatchPerInstanceConfigsReq.to_json( - instance_group_managers_patch_per_instance_configs_req_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_patch_per_instance_configs_rest_flattened_error(): +def test_patch_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2782,9 +3215,18 @@ def test_recreate_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_recreate_instances_request_resource" + ] = compute.InstanceGroupManagersRecreateInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2794,7 +3236,6 @@ def test_recreate_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2812,14 +3253,13 @@ def test_recreate_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.recreate_instances(request) @@ -2830,7 +3270,6 @@ def test_recreate_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2848,19 +3287,49 @@ def test_recreate_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_recreate_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.RecreateInstancesInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_recreate_instances_request_resource" + ] = compute.InstanceGroupManagersRecreateInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recreate_instances(request) + + def test_recreate_instances_rest_from_dict(): test_recreate_instances_rest(request_type=dict) -def test_recreate_instances_rest_flattened(): +def test_recreate_instances_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2869,43 +3338,46 @@ def test_recreate_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_recreate_instances_request_resource = compute.InstanceGroupManagersRecreateInstancesRequest( - instances=["instances_value"] - ) - client.recreate_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_recreate_instances_request_resource=instance_group_managers_recreate_instances_request_resource, + instance_group_managers_recreate_instances_request_resource=compute.InstanceGroupManagersRecreateInstancesRequest( + instances=["instances_value"] + ), ) + mock_args.update(sample_request) + client.recreate_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersRecreateInstancesRequest.to_json( - instance_group_managers_recreate_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/recreateInstances" + % client.transport._host, + args[1], + ) -def test_recreate_instances_rest_flattened_error(): +def test_recreate_instances_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2929,9 +3401,13 @@ def test_resize_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2941,7 +3417,6 @@ def test_resize_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2959,14 +3434,13 @@ def test_resize_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.resize(request) @@ -2977,7 +3451,6 @@ def test_resize_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2995,19 +3468,43 @@ def test_resize_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_resize_rest_bad_request( + transport: str = "rest", request_type=compute.ResizeInstanceGroupManagerRequest +): + client = 
InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + def test_resize_rest_from_dict(): test_resize_rest(request_type=dict) -def test_resize_rest_flattened(): +def test_resize_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -3016,36 +3513,44 @@ def test_resize_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.resize( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", size=443, ) + mock_args.update(sample_request) + client.resize(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert str(443) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/resize" + % client.transport._host, + args[1], + ) -def test_resize_rest_flattened_error(): +def test_resize_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3068,9 +3573,18 @@ def test_set_instance_template_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_set_instance_template_request_resource" + ] = compute.InstanceGroupManagersSetInstanceTemplateRequest( + instance_template="instance_template_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -3080,7 +3594,6 @@ def test_set_instance_template_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3098,14 +3611,13 @@ def test_set_instance_template_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_instance_template(request) @@ -3116,7 +3628,6 @@ def test_set_instance_template_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3134,19 +3645,49 @@ def test_set_instance_template_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_instance_template_rest_bad_request( + transport: str = "rest", + request_type=compute.SetInstanceTemplateInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_set_instance_template_request_resource" + ] = compute.InstanceGroupManagersSetInstanceTemplateRequest( + instance_template="instance_template_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_instance_template(request) + + def test_set_instance_template_rest_from_dict(): test_set_instance_template_rest(request_type=dict) -def test_set_instance_template_rest_flattened(): +def test_set_instance_template_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -3155,43 +3696,46 @@ def test_set_instance_template_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_set_instance_template_request_resource = compute.InstanceGroupManagersSetInstanceTemplateRequest( - instance_template="instance_template_value" - ) - client.set_instance_template( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_set_instance_template_request_resource=instance_group_managers_set_instance_template_request_resource, + instance_group_managers_set_instance_template_request_resource=compute.InstanceGroupManagersSetInstanceTemplateRequest( + instance_template="instance_template_value" + ), ) + mock_args.update(sample_request) + client.set_instance_template(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersSetInstanceTemplateRequest.to_json( - instance_group_managers_set_instance_template_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate" + % client.transport._host, + args[1], + ) -def test_set_instance_template_rest_flattened_error(): +def test_set_instance_template_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3216,9 +3760,18 @@ def test_set_target_pools_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_set_target_pools_request_resource" + ] = compute.InstanceGroupManagersSetTargetPoolsRequest( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3228,7 +3781,6 @@ def test_set_target_pools_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3246,14 +3798,13 @@ def test_set_target_pools_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_target_pools(request) @@ -3264,7 +3815,6 @@ def test_set_target_pools_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3282,19 +3832,49 @@ def test_set_target_pools_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_target_pools_rest_bad_request( + transport: str = "rest", + request_type=compute.SetTargetPoolsInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_set_target_pools_request_resource" + ] = compute.InstanceGroupManagersSetTargetPoolsRequest( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_pools(request) + + def test_set_target_pools_rest_from_dict(): test_set_target_pools_rest(request_type=dict) -def test_set_target_pools_rest_flattened(): +def test_set_target_pools_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -3303,43 +3883,46 @@ def test_set_target_pools_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_set_target_pools_request_resource = compute.InstanceGroupManagersSetTargetPoolsRequest( - fingerprint="fingerprint_value" - ) - client.set_target_pools( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_set_target_pools_request_resource=instance_group_managers_set_target_pools_request_resource, + instance_group_managers_set_target_pools_request_resource=compute.InstanceGroupManagersSetTargetPoolsRequest( + fingerprint="fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_target_pools(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersSetTargetPoolsRequest.to_json( - instance_group_managers_set_target_pools_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/setTargetPools" + % client.transport._host, + args[1], + ) -def test_set_target_pools_rest_flattened_error(): +def test_set_target_pools_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3364,9 +3947,20 @@ def test_update_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ] = compute.InstanceGroupManagersUpdatePerInstanceConfigsReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3376,7 +3970,6 @@ def test_update_per_instance_configs_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3394,14 +3987,13 @@ def test_update_per_instance_configs_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_per_instance_configs(request) @@ -3412,7 +4004,6 @@ def test_update_per_instance_configs_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3430,19 +4021,51 @@ def test_update_per_instance_configs_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.UpdatePerInstanceConfigsInstanceGroupManagerRequest, +): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "instance_group_managers_update_per_instance_configs_req_resource" + ] = compute.InstanceGroupManagersUpdatePerInstanceConfigsReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_per_instance_configs(request) + + def test_update_per_instance_configs_rest_from_dict(): test_update_per_instance_configs_rest(request_type=dict) -def test_update_per_instance_configs_rest_flattened(): +def test_update_per_instance_configs_rest_flattened(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -3451,45 +4074,48 @@ def test_update_per_instance_configs_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_managers_update_per_instance_configs_req_resource = compute.InstanceGroupManagersUpdatePerInstanceConfigsReq( - per_instance_configs=[ - compute.PerInstanceConfig(fingerprint="fingerprint_value") - ] - ) - client.update_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group_manager="instance_group_manager_value", - instance_group_managers_update_per_instance_configs_req_resource=instance_group_managers_update_per_instance_configs_req_resource, + instance_group_managers_update_per_instance_configs_req_resource=compute.InstanceGroupManagersUpdatePerInstanceConfigsReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ), ) + mock_args.update(sample_request) + client.update_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManagersUpdatePerInstanceConfigsReq.to_json( - instance_group_managers_update_per_instance_configs_req_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_update_per_instance_configs_rest_flattened_error(): +def test_update_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = InstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3605,8 +4231,10 @@ def test_instance_group_managers_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_instance_group_managers_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -3630,29 +4258,6 @@ def test_instance_group_managers_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def 
test_instance_group_managers_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.instance_group_managers.transports.InstanceGroupManagersTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceGroupManagersTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_instance_group_managers_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -3664,7 +4269,6 @@ def test_instance_group_managers_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_instance_group_managers_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -3680,21 +4284,6 @@ def test_instance_group_managers_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_instance_group_managers_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstanceGroupManagersClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_instance_group_managers_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -3841,3 +4430,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_instance_groups.py b/tests/unit/gapic/compute_v1/test_instance_groups.py index 4f645f482..8a057eca3 100644 --- a/tests/unit/gapic/compute_v1/test_instance_groups.py +++ b/tests/unit/gapic/compute_v1/test_instance_groups.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.instance_groups import InstanceGroupsClient from google.cloud.compute_v1.services.instance_groups import pagers from google.cloud.compute_v1.services.instance_groups import transports -from google.cloud.compute_v1.services.instance_groups.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_instance_groups_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_instance_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and 
GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_instance_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_instance_groups_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -296,7 +284,7 @@ def test_instance_groups_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -313,6 +301,7 @@ def test_instance_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -337,7 +326,7 @@ def test_instance_groups_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -346,6 +335,7 @@ def test_instance_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -358,7 +348,7 @@ def test_instance_groups_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -367,6 +357,7 @@ def test_instance_groups_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -381,7 +372,7 @@ def test_instance_groups_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -390,6 +381,7 @@ def test_instance_groups_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -404,7 +396,7 @@ def test_instance_groups_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -413,6 +405,7 @@ def test_instance_groups_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -423,9 +416,18 @@ def test_add_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_add_instances_request_resource" + ] = compute.InstanceGroupsAddInstancesRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -435,7 +437,6 @@ def test_add_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -453,14 +454,13 @@ def test_add_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_instances(request) @@ -471,7 +471,6 @@ def test_add_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -489,18 +488,49 @@ def test_add_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_instances_rest_bad_request( + transport: str = "rest", request_type=compute.AddInstancesInstanceGroupRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_add_instances_request_resource" + ] = compute.InstanceGroupsAddInstancesRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_instances(request) + + def test_add_instances_rest_from_dict(): test_add_instances_rest(request_type=dict) -def test_add_instances_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_instances_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -508,42 +538,47 @@ def test_add_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_groups_add_instances_request_resource = compute.InstanceGroupsAddInstancesRequest( - instances=[compute.InstanceReference(instance="instance_value")] - ) - client.add_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group="instance_group_value", - instance_groups_add_instances_request_resource=instance_groups_add_instances_request_resource, + instance_groups_add_instances_request_resource=compute.InstanceGroupsAddInstancesRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ), ) + mock_args.update(sample_request) + client.add_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupsAddInstancesRequest.to_json( - instance_groups_add_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_instances_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/addInstances" + % client.transport._host, + args[1], + ) + + +def test_add_instances_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -566,35 +601,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceGroupAggregatedList( id="id_value", - items={ - "key_value": compute.InstanceGroupsScopedList( - instance_groups=[ - compute.InstanceGroup( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -602,26 +627,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.InstanceGroupsScopedList( - instance_groups=[ - compute.InstanceGroup(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListInstanceGroupsRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -629,27 +671,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.InstanceGroupAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/instanceGroups" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -659,11 +710,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupAggregatedList( @@ -698,10 +751,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.InstanceGroupsScopedList) assert pager.get("h") is None @@ -719,7 +771,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.InstanceGroupsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -731,9 +783,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -743,7 +799,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -761,14 +816,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -779,7 +833,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -797,18 +850,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteInstanceGroupRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -816,33 +895,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group="instance_group_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -862,9 +952,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -876,7 +970,6 @@ def test_get_rest( id=205, kind="kind_value", name="name_value", - named_ports=[compute.NamedPort(name="name_value")], network="network_value", region="region_value", self_link="self_link_value", @@ -886,9 +979,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroup.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -901,7 +994,6 @@ def test_get_rest( assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.named_ports == [compute.NamedPort(name="name_value")] assert response.network == "network_value" assert response.region == "region_value" assert response.self_link == "self_link_value" @@ -910,12 +1002,41 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInstanceGroupRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -923,33 +1044,44 @@ def test_get_rest_flattened(): return_value = compute.InstanceGroup() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroup.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group="instance_group_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -969,9 +1101,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_group_resource"] = compute.InstanceGroup( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -981,7 +1116,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -999,14 +1133,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1017,7 +1150,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1035,18 +1167,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertInstanceGroupRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_group_resource"] = compute.InstanceGroup( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1054,40 +1211,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- instance_group_resource = compute.InstanceGroup( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - instance_group_resource=instance_group_resource, + instance_group_resource=compute.InstanceGroup( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroup.to_json( - instance_group_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1109,28 +1268,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InstanceGroupList( id="id_value", - items=[ - compute.InstanceGroup(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1138,21 +1293,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.InstanceGroup(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListInstanceGroupsRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1160,30 +1336,36 @@ def test_list_rest_flattened(): return_value = compute.InstanceGroupList() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1195,11 +1377,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupList( @@ -1229,16 +1413,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceGroup) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1250,26 +1433,33 @@ def test_list_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_list_instances_request_resource" + ] = compute.InstanceGroupsListInstancesRequest( + instance_state=compute.InstanceGroupsListInstancesRequest.InstanceState.ALL + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InstanceGroupsListInstances( id="id_value", - items=[compute.InstanceWithNamedPorts(instance="instance_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupsListInstances.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupsListInstances.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) @@ -1277,19 +1467,51 @@ def test_list_instances_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesPager) assert response.id == "id_value" - assert response.items == [compute.InstanceWithNamedPorts(instance="instance_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_instances_rest_bad_request( + transport: str = "rest", request_type=compute.ListInstancesInstanceGroupsRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_list_instances_request_resource" + ] = compute.InstanceGroupsListInstancesRequest( + instance_state=compute.InstanceGroupsListInstancesRequest.InstanceState.ALL + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) def test_list_instances_rest_from_dict(): test_list_instances_rest(request_type=dict) -def test_list_instances_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_instances_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1297,42 +1519,47 @@ def test_list_instances_rest_flattened(): return_value = compute.InstanceGroupsListInstances() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupsListInstances.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupsListInstances.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_groups_list_instances_request_resource = compute.InstanceGroupsListInstancesRequest( - instance_state=compute.InstanceGroupsListInstancesRequest.InstanceState.ALL - ) - client.list_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group="instance_group_value", - instance_groups_list_instances_request_resource=instance_groups_list_instances_request_resource, + instance_groups_list_instances_request_resource=compute.InstanceGroupsListInstancesRequest( + instance_state=compute.InstanceGroupsListInstancesRequest.InstanceState.ALL + ), ) + mock_args.update(sample_request) + client.list_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupsListInstancesRequest.to_json( - instance_groups_list_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_list_instances_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/listInstances" + % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1348,11 +1575,13 @@ def test_list_instances_rest_flattened_error(): ) -def test_list_instances_pager(): +def test_list_instances_rest_pager(): client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceGroupsListInstances( @@ -1387,16 +1616,24 @@ def test_list_instances_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_instances(request={}) + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + sample_request[ + "instance_groups_list_instances_request_resource" + ] = compute.InstanceGroupsListInstancesRequest( + instance_state=compute.InstanceGroupsListInstancesRequest.InstanceState.ALL + ) - assert pager._metadata == metadata + pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceWithNamedPorts) for i in results) - pages = list(client.list_instances(request={}).pages) + pages = list(client.list_instances(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1408,9 +1645,18 @@ def test_remove_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_remove_instances_request_resource" + ] = compute.InstanceGroupsRemoveInstancesRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1420,7 +1666,6 @@ def test_remove_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1438,14 +1683,13 @@ def test_remove_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_instances(request) @@ -1456,7 +1700,6 @@ def test_remove_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1474,18 +1717,49 @@ def test_remove_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_instances_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveInstancesInstanceGroupRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_remove_instances_request_resource" + ] = compute.InstanceGroupsRemoveInstancesRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_instances(request) + + def test_remove_instances_rest_from_dict(): test_remove_instances_rest(request_type=dict) -def test_remove_instances_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_instances_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1493,42 +1767,47 @@ def test_remove_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_groups_remove_instances_request_resource = compute.InstanceGroupsRemoveInstancesRequest( - instances=[compute.InstanceReference(instance="instance_value")] - ) - client.remove_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group="instance_group_value", - instance_groups_remove_instances_request_resource=instance_groups_remove_instances_request_resource, + instance_groups_remove_instances_request_resource=compute.InstanceGroupsRemoveInstancesRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ), ) + mock_args.update(sample_request) + client.remove_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupsRemoveInstancesRequest.to_json( - instance_groups_remove_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_remove_instances_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/removeInstances" + % client.transport._host, + args[1], + ) + + +def test_remove_instances_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1551,9 +1830,16 @@ def test_set_named_ports_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_set_named_ports_request_resource" + ] = compute.InstanceGroupsSetNamedPortsRequest(fingerprint="fingerprint_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1563,7 +1849,6 @@ def test_set_named_ports_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1581,14 +1866,13 @@ def test_set_named_ports_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_named_ports(request) @@ -1599,7 +1883,6 @@ def test_set_named_ports_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1617,18 +1900,47 @@ def test_set_named_ports_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_named_ports_rest_bad_request( + transport: str = "rest", request_type=compute.SetNamedPortsInstanceGroupRequest +): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + request_init[ + "instance_groups_set_named_ports_request_resource" + ] = compute.InstanceGroupsSetNamedPortsRequest(fingerprint="fingerprint_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_named_ports(request) + + def test_set_named_ports_rest_from_dict(): test_set_named_ports_rest(request_type=dict) -def test_set_named_ports_rest_flattened(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_named_ports_rest_flattened(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1636,42 +1948,47 @@ def test_set_named_ports_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_groups_set_named_ports_request_resource = compute.InstanceGroupsSetNamedPortsRequest( - fingerprint="fingerprint_value" - ) - client.set_named_ports( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance_group="instance_group_value", - instance_groups_set_named_ports_request_resource=instance_groups_set_named_ports_request_resource, + instance_groups_set_named_ports_request_resource=compute.InstanceGroupsSetNamedPortsRequest( + fingerprint="fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_named_ports(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupsSetNamedPortsRequest.to_json( - instance_groups_set_named_ports_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_named_ports_rest_flattened_error(): - client = InstanceGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instanceGroups/{instance_group}/setNamedPorts" + % client.transport._host, + args[1], + ) + + +def test_set_named_ports_rest_flattened_error(transport: str = "rest"): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1771,8 +2088,10 @@ def test_instance_groups_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_instance_groups_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1796,29 +2115,6 @@ def test_instance_groups_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_instance_groups_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.instance_groups.transports.InstanceGroupsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceGroupsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_instance_groups_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1830,7 +2126,6 @@ def test_instance_groups_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_instance_groups_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1846,21 +2141,6 @@ def test_instance_groups_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_instance_groups_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstanceGroupsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_instance_groups_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2007,3 +2287,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_instance_templates.py b/tests/unit/gapic/compute_v1/test_instance_templates.py index aa93172ef..a96d274b6 100644 --- a/tests/unit/gapic/compute_v1/test_instance_templates.py +++ b/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.instance_templates import InstanceTemplatesClient from google.cloud.compute_v1.services.instance_templates import pagers from google.cloud.compute_v1.services.instance_templates import transports -from google.cloud.compute_v1.services.instance_templates.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_instance_templates_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_instance_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_instance_templates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_instance_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case 
api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_instance_templates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_instance_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_instance_templates_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_instance_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_instance_templates_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_instance_templates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_instance_templates_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_instance_templates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -369,7 +359,7 @@ def test_instance_templates_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_instance_templates_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_instance_templates_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_instance_templates_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_instance_templates_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_instance_templates_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "instance_template": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -446,7 +439,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -464,14 +456,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -482,7 +473,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -500,18 +490,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteInstanceTemplateRequest +): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "instance_template": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -519,30 +531,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "instance_template": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", instance_template="instance_template_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "instance_template_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -561,9 +581,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "instance_template": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -574,22 +594,14 @@ def test_get_rest( id=205, kind="kind_value", name="name_value", - properties=compute.InstanceProperties( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ), self_link="self_link_value", source_instance="source_instance_value", - source_instance_params=compute.SourceInstanceParams( - disk_configs=[compute.DiskInstantiationConfig(auto_delete=True)] - ), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceTemplate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceTemplate.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -601,24 +613,41 @@ def test_get_rest( assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.properties == compute.InstanceProperties( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ) assert response.self_link == "self_link_value" assert response.source_instance == "source_instance_value" - assert response.source_instance_params == compute.SourceInstanceParams( - disk_configs=[compute.DiskInstantiationConfig(auto_delete=True)] + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInstanceTemplateRequest +): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "instance_template": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -626,30 +655,38 @@ def test_get_rest_flattened(): return_value = compute.InstanceTemplate() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceTemplate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceTemplate.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "instance_template": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", instance_template="instance_template_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "instance_template_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/instanceTemplates/{instance_template}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -668,60 +705,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyInstanceTemplateRequest +): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -729,30 +767,36 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - project="project_value", resource="resource_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", resource="resource_value",) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/instanceTemplates/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -771,9 +815,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["instance_template_resource"] = compute.InstanceTemplate( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -783,7 +830,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -801,14 +847,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -819,7 +864,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -837,18 +881,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertInstanceTemplateRequest +): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["instance_template_resource"] = compute.InstanceTemplate( + 
creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -856,38 +925,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_template_resource = compute.InstanceTemplate( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - instance_template_resource=instance_template_resource, + instance_template_resource=compute.InstanceTemplate( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceTemplate.to_json( - instance_template_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/instanceTemplates" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -908,28 +980,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InstanceTemplateList( id="id_value", - items=[ - compute.InstanceTemplate(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceTemplateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceTemplateList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -937,21 +1005,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.InstanceTemplate(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListInstanceTemplatesRequest +): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -959,27 +1048,36 @@ def test_list_rest_flattened(): return_value = compute.InstanceTemplateList() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceTemplateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceTemplateList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/instanceTemplates" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -989,11 +1087,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceTemplateList( @@ -1023,16 +1123,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceTemplate) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1044,60 +1143,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyInstanceTemplateRequest +): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1105,40 +1211,42 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- global_set_policy_request_resource = compute.GlobalSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_policy_request_resource=global_set_policy_request_resource, + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetPolicyRequest.to_json( - global_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/instanceTemplates/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1161,9 +1269,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1173,9 +1284,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1185,12 +1296,41 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsInstanceTemplateRequest, +): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1198,40 +1338,42 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = InstanceTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/instanceTemplates/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1330,8 +1472,10 @@ def test_instance_templates_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_instance_templates_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1355,29 +1499,6 @@ def test_instance_templates_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_instance_templates_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.instance_templates.transports.InstanceTemplatesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstanceTemplatesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_instance_templates_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1389,7 +1510,6 @@ def test_instance_templates_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_instance_templates_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1405,21 +1525,6 @@ def test_instance_templates_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_instance_templates_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstanceTemplatesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_instance_templates_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1566,3 +1671,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InstanceTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_instances.py b/tests/unit/gapic/compute_v1/test_instances.py index 74f3870c3..3e6b2d2b9 100644 --- a/tests/unit/gapic/compute_v1/test_instances.py +++ b/tests/unit/gapic/compute_v1/test_instances.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.instances import InstancesClient from google.cloud.compute_v1.services.instances import pagers from google.cloud.compute_v1.services.instances import transports -from google.cloud.compute_v1.services.instances.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,7 +169,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,6 +178,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -201,7 +186,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -210,6 +195,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -217,7 +203,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -226,6 +212,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,7 +232,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,6 +241,7 @@ def test_instances_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -284,7 +272,7 @@ def test_instances_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as 
patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -301,6 +289,7 @@ def test_instances_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -325,7 +314,7 @@ def test_instances_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -334,6 +323,7 @@ def test_instances_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -346,7 +336,7 @@ def test_instances_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +345,7 @@ def test_instances_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -369,7 +360,7 @@ def test_instances_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +369,7 @@ def test_instances_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +384,7 @@ def test_instances_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -401,6 +393,7 @@ def test_instances_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -411,9 +404,12 @@ def test_add_access_config_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["access_config_resource"] = compute.AccessConfig( + external_ipv6="external_ipv6_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -423,7 +419,6 @@ def test_add_access_config_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -441,14 +436,13 @@ def test_add_access_config_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_access_config(request) @@ -459,7 +453,6 @@ def test_add_access_config_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -477,18 +470,43 @@ def test_add_access_config_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_access_config_rest_bad_request( + transport: str = "rest", request_type=compute.AddAccessConfigInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["access_config_resource"] = compute.AccessConfig( + external_ipv6="external_ipv6_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_access_config(request) + + def test_add_access_config_rest_from_dict(): test_add_access_config_rest(request_type=dict) -def test_add_access_config_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_access_config_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -496,44 +514,48 @@ def test_add_access_config_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - access_config_resource = compute.AccessConfig( - external_ipv6="external_ipv6_value" - ) - client.add_access_config( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", network_interface="network_interface_value", - access_config_resource=access_config_resource, + access_config_resource=compute.AccessConfig( + external_ipv6="external_ipv6_value" + ), ) + mock_args.update(sample_request) + client.add_access_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert "network_interface_value" in http_call[1] + str(body) + str(params) - assert compute.AccessConfig.to_json( - access_config_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_access_config_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addAccessConfig" + % client.transport._host, + args[1], + ) + + +def test_add_access_config_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -557,9 +579,14 @@ def test_add_resource_policies_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_add_resource_policies_request_resource" + ] = compute.InstancesAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -569,7 +596,6 @@ def test_add_resource_policies_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -587,14 +613,13 @@ def test_add_resource_policies_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_resource_policies(request) @@ -605,7 +630,6 @@ def test_add_resource_policies_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -623,18 +647,45 @@ def test_add_resource_policies_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_resource_policies_rest_bad_request( + transport: str = "rest", request_type=compute.AddResourcePoliciesInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_add_resource_policies_request_resource" + ] = compute.InstancesAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies(request) + + def test_add_resource_policies_rest_from_dict(): test_add_resource_policies_rest(request_type=dict) -def test_add_resource_policies_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_resource_policies_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -642,42 +693,47 @@ def test_add_resource_policies_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_add_resource_policies_request_resource = compute.InstancesAddResourcePoliciesRequest( - resource_policies=["resource_policies_value"] - ) - client.add_resource_policies( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_add_resource_policies_request_resource=instances_add_resource_policies_request_resource, + instances_add_resource_policies_request_resource=compute.InstancesAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ), ) + mock_args.update(sample_request) + client.add_resource_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesAddResourcePoliciesRequest.to_json( - instances_add_resource_policies_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_resource_policies_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/addResourcePolicies" + % client.transport._host, + args[1], + ) + + +def test_add_resource_policies_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -700,37 +756,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceAggregatedList( id="id_value", - items={ - "key_value": compute.InstancesScopedList( - instances=[ - compute.Instance( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -738,30 +782,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.InstancesScopedList( - instances=[ - compute.Instance( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListInstancesRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -769,27 +826,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.InstanceAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/instances" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -799,11 +865,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceAggregatedList( @@ -836,10 +904,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.InstancesScopedList) assert pager.get("h") is None @@ -854,7 +921,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.InstancesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -866,9 +933,10 @@ def test_attach_disk_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["attached_disk_resource"] = compute.AttachedDisk(auto_delete=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -878,7 +946,6 @@ def test_attach_disk_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -896,14 +963,13 @@ def test_attach_disk_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.attach_disk(request) @@ -914,7 +980,6 @@ def test_attach_disk_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -932,18 +997,41 @@ def test_attach_disk_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_attach_disk_rest_bad_request( + transport: str = "rest", request_type=compute.AttachDiskInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["attached_disk_resource"] = compute.AttachedDisk(auto_delete=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_disk(request) + + def test_attach_disk_rest_from_dict(): test_attach_disk_rest(request_type=dict) -def test_attach_disk_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_attach_disk_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -951,40 +1039,45 @@ def test_attach_disk_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - attached_disk_resource = compute.AttachedDisk(auto_delete=True) - client.attach_disk( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - attached_disk_resource=attached_disk_resource, + attached_disk_resource=compute.AttachedDisk(auto_delete=True), ) + mock_args.update(sample_request) + client.attach_disk(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.AttachedDisk.to_json( - attached_disk_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_attach_disk_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/attachDisk" + % client.transport._host, + args[1], + ) + + +def test_attach_disk_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1005,9 +1098,12 @@ def test_bulk_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init[ + "bulk_insert_instance_resource_resource" + ] = compute.BulkInsertInstanceResource(count=553) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1017,7 +1113,6 @@ def test_bulk_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1035,14 +1130,13 @@ def test_bulk_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.bulk_insert(request) @@ -1053,7 +1147,6 @@ def test_bulk_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1071,18 +1164,43 @@ def test_bulk_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_bulk_insert_rest_bad_request( + transport: str = "rest", request_type=compute.BulkInsertInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init[ + "bulk_insert_instance_resource_resource" + ] = compute.BulkInsertInstanceResource(count=553) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.bulk_insert(request) + + def test_bulk_insert_rest_from_dict(): test_bulk_insert_rest(request_type=dict) -def test_bulk_insert_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_bulk_insert_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
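# A hedged sketch of what the new *_rest_bad_request tests rely on: when the
# mocked session returns a 400 response, the REST transport surfaces it as
# google.api_core.exceptions.BadRequest. The mapping can be reproduced directly
# with from_http_response(); the empty JSON body and bare Request are placeholders.
from requests import Request, Response
from google.api_core import exceptions as core_exceptions

resp = Response()
resp.status_code = 400
resp.request = Request()
resp._content = b"{}"
assert isinstance(core_exceptions.from_http_response(resp), core_exceptions.BadRequest)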
with mock.patch.object(Session, "request") as req: @@ -1090,40 +1208,42 @@ def test_bulk_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - bulk_insert_instance_resource_resource = compute.BulkInsertInstanceResource( - count=553 - ) - client.bulk_insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - bulk_insert_instance_resource_resource=bulk_insert_instance_resource_resource, + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource( + count=553 + ), ) + mock_args.update(sample_request) + client.bulk_insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.BulkInsertInstanceResource.to_json( - bulk_insert_instance_resource_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_bulk_insert_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/bulkInsert" + % client.transport._host, + args[1], + ) + + +def test_bulk_insert_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1145,9 +1265,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1157,7 +1277,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1175,14 +1294,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -1193,7 +1311,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1211,18 +1328,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1230,31 +1369,42 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1274,9 +1424,9 @@ def test_delete_access_config_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
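# An illustrative note on the request_init dicts used above: "satisfying
# transcoding" means every variable in the method's URI template can be filled
# from the request, so the transport can build a concrete path. The template
# below is the path portion of the one asserted in the surrounding test;
# expand() only demonstrates the substitution, the real transcoding happens
# inside the transport.
from google.api_core import path_template

path = path_template.expand(
    "projects/{project}/zones/{zone}/instances/{instance}",
    project="sample1",
    zone="sample2",
    instance="sample3",
)
assert path == "projects/sample1/zones/sample2/instances/sample3"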
with mock.patch.object(Session, "request") as req: @@ -1286,7 +1436,6 @@ def test_delete_access_config_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1304,14 +1453,13 @@ def test_delete_access_config_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_access_config(request) @@ -1322,7 +1470,6 @@ def test_delete_access_config_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1340,18 +1487,40 @@ def test_delete_access_config_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_access_config_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteAccessConfigInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_access_config(request) + + def test_delete_access_config_rest_from_dict(): test_delete_access_config_rest(request_type=dict) -def test_delete_access_config_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_access_config_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1359,37 +1528,46 @@ def test_delete_access_config_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_access_config( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", access_config="access_config_value", network_interface="network_interface_value", ) + mock_args.update(sample_request) + client.delete_access_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert "access_config_value" in http_call[1] + str(body) + str(params) - assert "network_interface_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/deleteAccessConfig" + % client.transport._host, + args[1], + ) -def test_delete_access_config_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_access_config_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1411,9 +1589,9 @@ def test_detach_disk_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
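# A sketch (with a hypothetical URL) of why args[1] in these assertions is the
# request URL: requests.Session.get/post/... funnel into Session.request, and
# because the class attribute is replaced by a plain MagicMock it receives the
# HTTP verb and URL as its first two positional arguments.
from unittest import mock
from requests import Session

with mock.patch.object(Session, "request") as req:
    Session().get("https://example.com/compute/v1/projects/sample1")
    _, args, _ = req.mock_calls[0]
    assert args[0] == "GET"
    assert args[1] == "https://example.com/compute/v1/projects/sample1"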
with mock.patch.object(Session, "request") as req: @@ -1423,7 +1601,6 @@ def test_detach_disk_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1441,14 +1618,13 @@ def test_detach_disk_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.detach_disk(request) @@ -1459,7 +1635,6 @@ def test_detach_disk_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1477,18 +1652,40 @@ def test_detach_disk_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" -def test_detach_disk_rest_from_dict(): - test_detach_disk_rest(request_type=dict) +def test_detach_disk_rest_bad_request( + transport: str = "rest", request_type=compute.DetachDiskInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_disk(request) + + +def test_detach_disk_rest_from_dict(): + test_detach_disk_rest(request_type=dict) -def test_detach_disk_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + +def test_detach_disk_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1496,35 +1693,45 @@ def test_detach_disk_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.detach_disk( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", device_name="device_name_value", ) + mock_args.update(sample_request) + client.detach_disk(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert "device_name_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/detachDisk" + % client.transport._host, + args[1], + ) -def test_detach_disk_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_detach_disk_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1543,146 +1750,110 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetInstanceReque credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.Instance( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ), can_ip_forward=True, - confidential_instance_config=compute.ConfidentialInstanceConfig( - enable_confidential_compute=True - ), cpu_platform="cpu_platform_value", creation_timestamp="creation_timestamp_value", deletion_protection=True, description="description_value", - disks=[compute.AttachedDisk(auto_delete=True)], - display_device=compute.DisplayDevice(enable_display=True), fingerprint="fingerprint_value", - guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)], hostname="hostname_value", id=205, kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, last_start_timestamp="last_start_timestamp_value", last_stop_timestamp="last_stop_timestamp_value", last_suspended_timestamp="last_suspended_timestamp_value", machine_type="machine_type_value", - metadata=compute.Metadata(fingerprint="fingerprint_value"), min_cpu_platform="min_cpu_platform_value", name="name_value", - network_interfaces=[ - compute.NetworkInterface( - access_configs=[ - compute.AccessConfig(external_ipv6="external_ipv6_value") - ] - ) - ], private_ipv6_google_access=compute.Instance.PrivateIpv6GoogleAccess.ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE, - reservation_affinity=compute.ReservationAffinity( - consume_reservation_type=compute.ReservationAffinity.ConsumeReservationType.ANY_RESERVATION - ), resource_policies=["resource_policies_value"], satisfies_pzs=True, - scheduling=compute.Scheduling(automatic_restart=True), self_link="self_link_value", - service_accounts=[compute.ServiceAccount(email="email_value")], - shielded_instance_config=compute.ShieldedInstanceConfig( - enable_integrity_monitoring=True - ), - shielded_instance_integrity_policy=compute.ShieldedInstanceIntegrityPolicy( - update_auto_learn_policy=True - ), start_restricted=True, status=compute.Instance.Status.DEPROVISIONING, status_message="status_message_value", - tags=compute.Tags(fingerprint="fingerprint_value"), zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Instance.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Instance.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Instance) - assert response.advanced_machine_features == compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) assert response.can_ip_forward is True - assert response.confidential_instance_config == compute.ConfidentialInstanceConfig( - enable_confidential_compute=True - ) assert response.cpu_platform == "cpu_platform_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.deletion_protection is True assert response.description == "description_value" - assert response.disks == [compute.AttachedDisk(auto_delete=True)] - assert response.display_device == compute.DisplayDevice(enable_display=True) assert response.fingerprint == "fingerprint_value" - assert response.guest_accelerators == [ - compute.AcceleratorConfig(accelerator_count=1805) - ] assert response.hostname == "hostname_value" assert response.id == 205 assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.last_start_timestamp == "last_start_timestamp_value" assert response.last_stop_timestamp == "last_stop_timestamp_value" assert response.last_suspended_timestamp == "last_suspended_timestamp_value" assert response.machine_type == "machine_type_value" - assert response.metadata == compute.Metadata(fingerprint="fingerprint_value") assert response.min_cpu_platform == "min_cpu_platform_value" assert response.name == "name_value" - assert response.network_interfaces == [ - compute.NetworkInterface( - access_configs=[compute.AccessConfig(external_ipv6="external_ipv6_value")] - ) - ] assert ( response.private_ipv6_google_access == compute.Instance.PrivateIpv6GoogleAccess.ENABLE_BIDIRECTIONAL_ACCESS_TO_GOOGLE ) - assert response.reservation_affinity == compute.ReservationAffinity( - consume_reservation_type=compute.ReservationAffinity.ConsumeReservationType.ANY_RESERVATION - ) assert response.resource_policies == ["resource_policies_value"] assert response.satisfies_pzs is True - assert response.scheduling == compute.Scheduling(automatic_restart=True) assert response.self_link == "self_link_value" - assert response.service_accounts == [compute.ServiceAccount(email="email_value")] - assert response.shielded_instance_config == compute.ShieldedInstanceConfig( - enable_integrity_monitoring=True - ) - assert ( - response.shielded_instance_integrity_policy - == compute.ShieldedInstanceIntegrityPolicy(update_auto_learn_policy=True) - ) assert response.start_restricted is True assert response.status == compute.Instance.Status.DEPROVISIONING assert response.status_message == "status_message_value" - assert response.tags == compute.Tags(fingerprint="fingerprint_value") assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
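# A brief, illustrative example of the simplified return values used above: the
# regenerated tests populate only scalar fields (strings, ints, bools, enums),
# which serialize and compare cleanly without constructing nested messages.
from google.cloud.compute_v1.types import compute

instance = compute.Instance(
    name="name_value",
    can_ip_forward=True,
    status=compute.Instance.Status.DEPROVISIONING,
)
assert "name_value" in compute.Instance.to_json(instance)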
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1690,31 +1861,42 @@ def test_get_rest_flattened(): return_value = compute.Instance() # Wrap the value into a proper Response obj - json_return_value = compute.Instance.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Instance.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1734,54 +1916,60 @@ def test_get_effective_firewalls_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.InstancesGetEffectiveFirewallsResponse( - firewall_policys=[ - compute.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy( - display_name="display_name_value" - ) - ], - firewalls=[ - compute.Firewall( - allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] - ) - ], - ) + return_value = compute.InstancesGetEffectiveFirewallsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstancesGetEffectiveFirewallsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_effective_firewalls(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.InstancesGetEffectiveFirewallsResponse) - assert response.firewall_policys == [ - compute.InstancesGetEffectiveFirewallsResponseEffectiveFirewallPolicy( - display_name="display_name_value" - ) - ] - assert response.firewalls == [ - compute.Firewall(allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")]) - ] + + +def test_get_effective_firewalls_rest_bad_request( + transport: str = "rest", request_type=compute.GetEffectiveFirewallsInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_effective_firewalls(request) def test_get_effective_firewalls_rest_from_dict(): test_get_effective_firewalls_rest(request_type=dict) -def test_get_effective_firewalls_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_effective_firewalls_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1789,37 +1977,47 @@ def test_get_effective_firewalls_rest_flattened(): return_value = compute.InstancesGetEffectiveFirewallsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InstancesGetEffectiveFirewallsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_effective_firewalls( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", network_interface="network_interface_value", ) + mock_args.update(sample_request) + client.get_effective_firewalls(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert "network_interface_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getEffectiveFirewalls" + % client.transport._host, + args[1], + ) -def test_get_effective_firewalls_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_effective_firewalls_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1840,9 +2038,9 @@ def test_get_guest_attributes_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1850,18 +2048,15 @@ def test_get_guest_attributes_rest( return_value = compute.GuestAttributes( kind="kind_value", query_path="query_path_value", - query_value=compute.GuestAttributesValue( - items=[compute.GuestAttributesEntry(key="key_value")] - ), self_link="self_link_value", variable_key="variable_key_value", variable_value="variable_value_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.GuestAttributes.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.GuestAttributes.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_guest_attributes(request) @@ -1870,20 +2065,42 @@ def test_get_guest_attributes_rest( assert isinstance(response, compute.GuestAttributes) assert response.kind == "kind_value" assert response.query_path == "query_path_value" - assert response.query_value == compute.GuestAttributesValue( - items=[compute.GuestAttributesEntry(key="key_value")] - ) assert response.self_link == "self_link_value" assert response.variable_key == "variable_key_value" assert response.variable_value == "variable_value_value" +def test_get_guest_attributes_rest_bad_request( + transport: str = "rest", request_type=compute.GetGuestAttributesInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_guest_attributes(request) + + def test_get_guest_attributes_rest_from_dict(): test_get_guest_attributes_rest(request_type=dict) -def test_get_guest_attributes_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_guest_attributes_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1891,31 +2108,42 @@ def test_get_guest_attributes_rest_flattened(): return_value = compute.GuestAttributes() # Wrap the value into a proper Response obj - json_return_value = compute.GuestAttributes.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.GuestAttributes.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_guest_attributes( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.get_guest_attributes(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getGuestAttributes" + % client.transport._host, + args[1], + ) -def test_get_guest_attributes_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_guest_attributes_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1935,60 +2163,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1996,31 +2225,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2040,9 +2280,9 @@ def test_get_screenshot_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2050,9 +2290,9 @@ def test_get_screenshot_rest( return_value = compute.Screenshot(contents="contents_value", kind="kind_value",) # Wrap the value into a proper Response obj - json_return_value = compute.Screenshot.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Screenshot.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_screenshot(request) @@ -2063,12 +2303,37 @@ def test_get_screenshot_rest( assert response.kind == "kind_value" +def test_get_screenshot_rest_bad_request( + transport: str = "rest", request_type=compute.GetScreenshotInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_screenshot(request) + + def test_get_screenshot_rest_from_dict(): test_get_screenshot_rest(request_type=dict) -def test_get_screenshot_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_screenshot_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2076,31 +2341,42 @@ def test_get_screenshot_rest_flattened(): return_value = compute.Screenshot() # Wrap the value into a proper Response obj - json_return_value = compute.Screenshot.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Screenshot.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_screenshot( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.get_screenshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/screenshot" + % client.transport._host, + args[1], + ) -def test_get_screenshot_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_screenshot_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2120,9 +2396,9 @@ def test_get_serial_port_output_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2136,9 +2412,9 @@ def test_get_serial_port_output_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.SerialPortOutput.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SerialPortOutput.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_serial_port_output(request) @@ -2152,12 +2428,37 @@ def test_get_serial_port_output_rest( assert response.start == 558 +def test_get_serial_port_output_rest_bad_request( + transport: str = "rest", request_type=compute.GetSerialPortOutputInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_serial_port_output(request) + + def test_get_serial_port_output_rest_from_dict(): test_get_serial_port_output_rest(request_type=dict) -def test_get_serial_port_output_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_serial_port_output_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2165,31 +2466,42 @@ def test_get_serial_port_output_rest_flattened(): return_value = compute.SerialPortOutput() # Wrap the value into a proper Response obj - json_return_value = compute.SerialPortOutput.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SerialPortOutput.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_serial_port_output( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.get_serial_port_output(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/serialPort" + % client.transport._host, + args[1], + ) -def test_get_serial_port_output_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_serial_port_output_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2210,46 +2522,60 @@ def test_get_shielded_instance_identity_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.ShieldedInstanceIdentity( - encryption_key=compute.ShieldedInstanceIdentityEntry( - ek_cert="ek_cert_value" - ), - kind="kind_value", - signing_key=compute.ShieldedInstanceIdentityEntry(ek_cert="ek_cert_value"), - ) + return_value = compute.ShieldedInstanceIdentity(kind="kind_value",) # Wrap the value into a proper Response obj - json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_shielded_instance_identity(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.ShieldedInstanceIdentity) - assert response.encryption_key == compute.ShieldedInstanceIdentityEntry( - ek_cert="ek_cert_value" - ) assert response.kind == "kind_value" - assert response.signing_key == compute.ShieldedInstanceIdentityEntry( - ek_cert="ek_cert_value" + + +def test_get_shielded_instance_identity_rest_bad_request( + transport: str = "rest", + request_type=compute.GetShieldedInstanceIdentityInstanceRequest, +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
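+    # A mocked 400 response from the session is surfaced by the client as
+    # core_exceptions.BadRequest, which pytest.raises asserts below.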
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_shielded_instance_identity(request) + def test_get_shielded_instance_identity_rest_from_dict(): test_get_shielded_instance_identity_rest(request_type=dict) -def test_get_shielded_instance_identity_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_shielded_instance_identity_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2257,31 +2583,42 @@ def test_get_shielded_instance_identity_rest_flattened(): return_value = compute.ShieldedInstanceIdentity() # Wrap the value into a proper Response obj - json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ShieldedInstanceIdentity.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_shielded_instance_identity( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.get_shielded_instance_identity(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/getShieldedInstanceIdentity" + % client.transport._host, + args[1], + ) -def test_get_shielded_instance_identity_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_shielded_instance_identity_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2301,9 +2638,14 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_resource"] = compute.Instance( + advanced_machine_features=compute.AdvancedMachineFeatures( + enable_nested_virtualization=True + ) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2313,7 +2655,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2331,14 +2672,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -2349,7 +2689,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2367,18 +2706,45 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instance_resource"] = compute.Instance( + advanced_machine_features=compute.AdvancedMachineFeatures( + enable_nested_virtualization=True + ) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
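+    # For insert, the flattened call also carries a full compute.Instance resource,
+    # which is merged with the sample path fields before the request is issued.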
with mock.patch.object(Session, "request") as req: @@ -2386,42 +2752,44 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_resource = compute.Instance( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - instance_resource=instance_resource, + instance_resource=compute.Instance( + advanced_machine_features=compute.AdvancedMachineFeatures( + enable_nested_virtualization=True + ) + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.Instance.to_json( - instance_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2443,32 +2811,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListInstancesRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceList( id="id_value", - items=[ - compute.Instance( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -2476,25 +2836,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListInstancesRe # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Instance( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListInstancesRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2502,30 +2879,36 @@ def test_list_rest_flattened(): return_value = compute.InstanceList() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
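+    # path_template.validate checks the URL actually handed to Session.request
+    # against the expected HTTP rule template for the list method.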
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2535,11 +2918,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceList( @@ -2561,16 +2946,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Instance) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2582,26 +2966,24 @@ def test_list_referrers_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InstanceListReferrers( id="id_value", - items=[compute.Reference(kind="kind_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceListReferrers.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceListReferrers.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_referrers(request) @@ -2609,19 +2991,42 @@ def test_list_referrers_rest( # Establish that the response is the type that we expect. 
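+    # The list_referrers response is wrapped in a pager; scalar fields of the
+    # underlying InstanceListReferrers message remain readable through it.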
assert isinstance(response, pagers.ListReferrersPager) assert response.id == "id_value" - assert response.items == [compute.Reference(kind="kind_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_referrers_rest_bad_request( + transport: str = "rest", request_type=compute.ListReferrersInstancesRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_referrers(request) def test_list_referrers_rest_from_dict(): test_list_referrers_rest(request_type=dict) -def test_list_referrers_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_referrers_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2629,31 +3034,42 @@ def test_list_referrers_rest_flattened(): return_value = compute.InstanceListReferrers() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceListReferrers.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceListReferrers.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_referrers( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.list_referrers(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/referrers" + % client.transport._host, + args[1], + ) -def test_list_referrers_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_referrers_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2666,11 +3082,13 @@ def test_list_referrers_rest_flattened_error(): ) -def test_list_referrers_pager(): +def test_list_referrers_rest_pager(): client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InstanceListReferrers( @@ -2696,16 +3114,19 @@ def test_list_referrers_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_referrers(request={}) + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } - assert pager._metadata == metadata + pager = client.list_referrers(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Reference) for i in results) - pages = list(client.list_referrers(request={}).pages) + pages = list(client.list_referrers(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2717,9 +3138,14 @@ def test_remove_resource_policies_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_remove_resource_policies_request_resource" + ] = compute.InstancesRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
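+    # In addition to the path fields, this request carries a message body: the
+    # InstancesRemoveResourcePoliciesRequest resource populated in request_init above.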
with mock.patch.object(Session, "request") as req: @@ -2729,7 +3155,6 @@ def test_remove_resource_policies_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2747,14 +3172,13 @@ def test_remove_resource_policies_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_resource_policies(request) @@ -2765,7 +3189,6 @@ def test_remove_resource_policies_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2783,18 +3206,45 @@ def test_remove_resource_policies_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_resource_policies_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveResourcePoliciesInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_remove_resource_policies_request_resource" + ] = compute.InstancesRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies(request) + + def test_remove_resource_policies_rest_from_dict(): test_remove_resource_policies_rest(request_type=dict) -def test_remove_resource_policies_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_resource_policies_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2802,42 +3252,47 @@ def test_remove_resource_policies_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_remove_resource_policies_request_resource = compute.InstancesRemoveResourcePoliciesRequest( - resource_policies=["resource_policies_value"] - ) - client.remove_resource_policies( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_remove_resource_policies_request_resource=instances_remove_resource_policies_request_resource, + instances_remove_resource_policies_request_resource=compute.InstancesRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ), ) + mock_args.update(sample_request) + client.remove_resource_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesRemoveResourcePoliciesRequest.to_json( - instances_remove_resource_policies_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_remove_resource_policies_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/removeResourcePolicies" + % client.transport._host, + args[1], + ) + + +def test_remove_resource_policies_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2858,9 +3313,9 @@ def test_reset_rest(transport: str = "rest", request_type=compute.ResetInstanceR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2870,7 +3325,6 @@ def test_reset_rest(transport: str = "rest", request_type=compute.ResetInstanceR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2888,14 +3342,13 @@ def test_reset_rest(transport: str = "rest", request_type=compute.ResetInstanceR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.reset(request) @@ -2906,7 +3359,6 @@ def test_reset_rest(transport: str = "rest", request_type=compute.ResetInstanceR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2924,18 +3376,40 @@ def test_reset_rest(transport: str = "rest", request_type=compute.ResetInstanceR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_reset_rest_bad_request( + transport: str = "rest", request_type=compute.ResetInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.reset(request) + + def test_reset_rest_from_dict(): test_reset_rest(request_type=dict) -def test_reset_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_reset_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2943,31 +3417,42 @@ def test_reset_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.reset( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.reset(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/reset" + % client.transport._host, + args[1], + ) -def test_reset_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_reset_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2987,9 +3472,9 @@ def test_send_diagnostic_interrupt_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
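+    # SendDiagnosticInterruptInstanceResponse carries no payload fields, so the
+    # test below only asserts the type of the returned response.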
with mock.patch.object(Session, "request") as req: @@ -2997,11 +3482,11 @@ def test_send_diagnostic_interrupt_rest( return_value = compute.SendDiagnosticInterruptInstanceResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.SendDiagnosticInterruptInstanceResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.send_diagnostic_interrupt(request) @@ -3010,12 +3495,37 @@ def test_send_diagnostic_interrupt_rest( assert isinstance(response, compute.SendDiagnosticInterruptInstanceResponse) +def test_send_diagnostic_interrupt_rest_bad_request( + transport: str = "rest", request_type=compute.SendDiagnosticInterruptInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.send_diagnostic_interrupt(request) + + def test_send_diagnostic_interrupt_rest_from_dict(): test_send_diagnostic_interrupt_rest(request_type=dict) -def test_send_diagnostic_interrupt_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_send_diagnostic_interrupt_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -3023,33 +3533,44 @@ def test_send_diagnostic_interrupt_rest_flattened(): return_value = compute.SendDiagnosticInterruptInstanceResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.SendDiagnosticInterruptInstanceResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.send_diagnostic_interrupt( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.send_diagnostic_interrupt(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
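+    # req.mock_calls[0] unpacks to (name, args, kwargs); args[1] is the URL passed
+    # to Session.request and is validated against the sendDiagnosticInterrupt template.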
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/sendDiagnosticInterrupt" + % client.transport._host, + args[1], + ) -def test_send_diagnostic_interrupt_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_send_diagnostic_interrupt_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3069,9 +3590,9 @@ def test_set_deletion_protection_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -3081,7 +3602,6 @@ def test_set_deletion_protection_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3099,14 +3619,13 @@ def test_set_deletion_protection_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_deletion_protection(request) @@ -3117,7 +3636,6 @@ def test_set_deletion_protection_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3135,18 +3653,40 @@ def test_set_deletion_protection_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_deletion_protection_rest_bad_request( + transport: str = "rest", request_type=compute.SetDeletionProtectionInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy 
transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_deletion_protection(request) + + def test_set_deletion_protection_rest_from_dict(): test_set_deletion_protection_rest(request_type=dict) -def test_set_deletion_protection_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_deletion_protection_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -3154,31 +3694,42 @@ def test_set_deletion_protection_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_deletion_protection( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", ) + mock_args.update(sample_request) + client.set_deletion_protection(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setDeletionProtection" + % client.transport._host, + args[1], + ) -def test_set_deletion_protection_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_deletion_protection_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3198,9 +3749,9 @@ def test_set_disk_auto_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -3210,7 +3761,6 @@ def test_set_disk_auto_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3228,14 +3778,13 @@ def test_set_disk_auto_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_disk_auto_delete(request) @@ -3246,7 +3795,6 @@ def test_set_disk_auto_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3264,18 +3812,40 @@ def test_set_disk_auto_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_disk_auto_delete_rest_bad_request( + transport: str = "rest", request_type=compute.SetDiskAutoDeleteInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_disk_auto_delete(request) + + def test_set_disk_auto_delete_rest_from_dict(): test_set_disk_auto_delete_rest(request_type=dict) -def test_set_disk_auto_delete_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_disk_auto_delete_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3283,37 +3853,46 @@ def test_set_disk_auto_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.set_disk_auto_delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", auto_delete=True, device_name="device_name_value", ) + mock_args.update(sample_request) + client.set_disk_auto_delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert str(True) in http_call[1] + str(body) + str(params) - assert "device_name_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setDiskAutoDelete" + % client.transport._host, + args[1], + ) -def test_set_disk_auto_delete_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_disk_auto_delete_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3335,60 +3914,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - response = client.set_iam_policy(request) - - # Establish that the response is the type that we expect. - assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] - assert response.etag == "etag_value" - assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] - assert response.version == 774 + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3396,42 +3982,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - zone_set_policy_request_resource = compute.ZoneSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - zone_set_policy_request_resource=zone_set_policy_request_resource, + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.ZoneSetPolicyRequest.to_json( - zone_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3454,9 +4045,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_labels_request_resource" + ] = compute.InstancesSetLabelsRequest(label_fingerprint="label_fingerprint_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3466,7 +4060,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3484,14 +4077,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -3502,7 +4094,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3520,18 +4111,43 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_labels_request_resource" + ] = compute.InstancesSetLabelsRequest(label_fingerprint="label_fingerprint_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_labels_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3539,42 +4155,47 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_set_labels_request_resource = compute.InstancesSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_set_labels_request_resource=instances_set_labels_request_resource, + instances_set_labels_request_resource=compute.InstancesSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesSetLabelsRequest.to_json( - instances_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3597,9 +4218,14 @@ def test_set_machine_resources_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_machine_resources_request_resource" + ] = compute.InstancesSetMachineResourcesRequest( + guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3609,7 +4235,6 @@ def test_set_machine_resources_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3627,14 +4252,13 @@ def test_set_machine_resources_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_machine_resources(request) @@ -3645,7 +4269,6 @@ def test_set_machine_resources_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3663,18 +4286,45 @@ def test_set_machine_resources_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_machine_resources_rest_bad_request( + transport: str = "rest", request_type=compute.SetMachineResourcesInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_machine_resources_request_resource" + ] = compute.InstancesSetMachineResourcesRequest( + guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_machine_resources(request) + + def test_set_machine_resources_rest_from_dict(): test_set_machine_resources_rest(request_type=dict) -def test_set_machine_resources_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_machine_resources_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3682,42 +4332,47 @@ def test_set_machine_resources_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_set_machine_resources_request_resource = compute.InstancesSetMachineResourcesRequest( - guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] - ) - client.set_machine_resources( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_set_machine_resources_request_resource=instances_set_machine_resources_request_resource, + instances_set_machine_resources_request_resource=compute.InstancesSetMachineResourcesRequest( + guest_accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] + ), ) + mock_args.update(sample_request) + client.set_machine_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesSetMachineResourcesRequest.to_json( - instances_set_machine_resources_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_machine_resources_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineResources" + % client.transport._host, + args[1], + ) + + +def test_set_machine_resources_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3740,9 +4395,12 @@ def test_set_machine_type_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_machine_type_request_resource" + ] = compute.InstancesSetMachineTypeRequest(machine_type="machine_type_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3752,7 +4410,6 @@ def test_set_machine_type_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3770,14 +4427,13 @@ def test_set_machine_type_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_machine_type(request) @@ -3788,7 +4444,6 @@ def test_set_machine_type_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3806,18 +4461,43 @@ def test_set_machine_type_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_machine_type_rest_bad_request( + transport: str = "rest", request_type=compute.SetMachineTypeInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_machine_type_request_resource" + ] = compute.InstancesSetMachineTypeRequest(machine_type="machine_type_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_machine_type(request) + + def test_set_machine_type_rest_from_dict(): test_set_machine_type_rest(request_type=dict) -def test_set_machine_type_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_machine_type_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3825,42 +4505,47 @@ def test_set_machine_type_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_set_machine_type_request_resource = compute.InstancesSetMachineTypeRequest( - machine_type="machine_type_value" - ) - client.set_machine_type( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_set_machine_type_request_resource=instances_set_machine_type_request_resource, + instances_set_machine_type_request_resource=compute.InstancesSetMachineTypeRequest( + machine_type="machine_type_value" + ), ) + mock_args.update(sample_request) + client.set_machine_type(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesSetMachineTypeRequest.to_json( - instances_set_machine_type_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_machine_type_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMachineType" + % client.transport._host, + args[1], + ) + + +def test_set_machine_type_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -3883,9 +4568,12 @@ def test_set_metadata_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["metadata_resource"] = compute.Metadata( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3895,7 +4583,6 @@ def test_set_metadata_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3913,14 +4600,13 @@ def test_set_metadata_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_metadata(request) @@ -3931,7 +4617,6 @@ def test_set_metadata_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3949,18 +4634,43 @@ def test_set_metadata_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_metadata_rest_bad_request( + transport: str = "rest", request_type=compute.SetMetadataInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["metadata_resource"] = compute.Metadata( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_metadata(request) + + def test_set_metadata_rest_from_dict(): test_set_metadata_rest(request_type=dict) -def test_set_metadata_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_metadata_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3968,40 +4678,45 @@ def test_set_metadata_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - metadata_resource = compute.Metadata(fingerprint="fingerprint_value") - client.set_metadata( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - metadata_resource=metadata_resource, + metadata_resource=compute.Metadata(fingerprint="fingerprint_value"), ) + mock_args.update(sample_request) + client.set_metadata(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.Metadata.to_json( - metadata_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_metadata_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMetadata" + % client.transport._host, + args[1], + ) + + +def test_set_metadata_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4022,9 +4737,14 @@ def test_set_min_cpu_platform_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_min_cpu_platform_request_resource" + ] = compute.InstancesSetMinCpuPlatformRequest( + min_cpu_platform="min_cpu_platform_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4034,7 +4754,6 @@ def test_set_min_cpu_platform_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -4052,14 +4771,13 @@ def test_set_min_cpu_platform_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_min_cpu_platform(request) @@ -4070,7 +4788,6 @@ def test_set_min_cpu_platform_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -4088,18 +4805,45 @@ def test_set_min_cpu_platform_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_min_cpu_platform_rest_bad_request( + transport: str = "rest", request_type=compute.SetMinCpuPlatformInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_min_cpu_platform_request_resource" + ] = compute.InstancesSetMinCpuPlatformRequest( + min_cpu_platform="min_cpu_platform_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_min_cpu_platform(request) + + def test_set_min_cpu_platform_rest_from_dict(): test_set_min_cpu_platform_rest(request_type=dict) -def test_set_min_cpu_platform_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_min_cpu_platform_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4107,42 +4851,47 @@ def test_set_min_cpu_platform_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_set_min_cpu_platform_request_resource = compute.InstancesSetMinCpuPlatformRequest( - min_cpu_platform="min_cpu_platform_value" - ) - client.set_min_cpu_platform( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_set_min_cpu_platform_request_resource=instances_set_min_cpu_platform_request_resource, + instances_set_min_cpu_platform_request_resource=compute.InstancesSetMinCpuPlatformRequest( + min_cpu_platform="min_cpu_platform_value" + ), ) + mock_args.update(sample_request) + client.set_min_cpu_platform(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesSetMinCpuPlatformRequest.to_json( - instances_set_min_cpu_platform_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_min_cpu_platform_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setMinCpuPlatform" + % client.transport._host, + args[1], + ) + + +def test_set_min_cpu_platform_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4165,9 +4914,10 @@ def test_set_scheduling_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["scheduling_resource"] = compute.Scheduling(automatic_restart=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4177,7 +4927,6 @@ def test_set_scheduling_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -4195,14 +4944,13 @@ def test_set_scheduling_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_scheduling(request) @@ -4213,7 +4961,6 @@ def test_set_scheduling_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -4231,18 +4978,41 @@ def test_set_scheduling_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_scheduling_rest_bad_request( + transport: str = "rest", request_type=compute.SetSchedulingInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["scheduling_resource"] = compute.Scheduling(automatic_restart=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_scheduling(request) + + def test_set_scheduling_rest_from_dict(): test_set_scheduling_rest(request_type=dict) -def test_set_scheduling_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_scheduling_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4250,40 +5020,45 @@ def test_set_scheduling_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - scheduling_resource = compute.Scheduling(automatic_restart=True) - client.set_scheduling( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - scheduling_resource=scheduling_resource, + scheduling_resource=compute.Scheduling(automatic_restart=True), ) + mock_args.update(sample_request) + client.set_scheduling(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.Scheduling.to_json( - scheduling_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_scheduling_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setScheduling" + % client.transport._host, + args[1], + ) + + +def test_set_scheduling_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4304,9 +5079,12 @@ def test_set_service_account_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_service_account_request_resource" + ] = compute.InstancesSetServiceAccountRequest(email="email_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4316,7 +5094,6 @@ def test_set_service_account_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -4334,14 +5111,13 @@ def test_set_service_account_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_service_account(request) @@ -4352,7 +5128,6 @@ def test_set_service_account_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -4370,18 +5145,43 @@ def test_set_service_account_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_service_account_rest_bad_request( + transport: str = "rest", request_type=compute.SetServiceAccountInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_set_service_account_request_resource" + ] = compute.InstancesSetServiceAccountRequest(email="email_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_service_account(request) + + def test_set_service_account_rest_from_dict(): test_set_service_account_rest(request_type=dict) -def test_set_service_account_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_service_account_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4389,42 +5189,47 @@ def test_set_service_account_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_set_service_account_request_resource = compute.InstancesSetServiceAccountRequest( - email="email_value" - ) - client.set_service_account( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_set_service_account_request_resource=instances_set_service_account_request_resource, + instances_set_service_account_request_resource=compute.InstancesSetServiceAccountRequest( + email="email_value" + ), ) + mock_args.update(sample_request) + client.set_service_account(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesSetServiceAccountRequest.to_json( - instances_set_service_account_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_service_account_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setServiceAccount" + % client.transport._host, + args[1], + ) + + +def test_set_service_account_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4448,9 +5253,12 @@ def test_set_shielded_instance_integrity_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "shielded_instance_integrity_policy_resource" + ] = compute.ShieldedInstanceIntegrityPolicy(update_auto_learn_policy=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4460,7 +5268,6 @@ def test_set_shielded_instance_integrity_policy_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -4478,14 +5285,13 @@ def test_set_shielded_instance_integrity_policy_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_shielded_instance_integrity_policy(request) @@ -4496,7 +5302,6 @@ def test_set_shielded_instance_integrity_policy_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -4514,18 +5319,44 @@ def test_set_shielded_instance_integrity_policy_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_shielded_instance_integrity_policy_rest_bad_request( + transport: str = "rest", + request_type=compute.SetShieldedInstanceIntegrityPolicyInstanceRequest, +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "shielded_instance_integrity_policy_resource" + ] = compute.ShieldedInstanceIntegrityPolicy(update_auto_learn_policy=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_shielded_instance_integrity_policy(request) + + def test_set_shielded_instance_integrity_policy_rest_from_dict(): test_set_shielded_instance_integrity_policy_rest(request_type=dict) -def test_set_shielded_instance_integrity_policy_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_shielded_instance_integrity_policy_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4533,42 +5364,49 @@ def test_set_shielded_instance_integrity_policy_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - shielded_instance_integrity_policy_resource = compute.ShieldedInstanceIntegrityPolicy( - update_auto_learn_policy=True - ) - client.set_shielded_instance_integrity_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - shielded_instance_integrity_policy_resource=shielded_instance_integrity_policy_resource, + shielded_instance_integrity_policy_resource=compute.ShieldedInstanceIntegrityPolicy( + update_auto_learn_policy=True + ), ) + mock_args.update(sample_request) + client.set_shielded_instance_integrity_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.ShieldedInstanceIntegrityPolicy.to_json( - shielded_instance_integrity_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_shielded_instance_integrity_policy_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setShieldedInstanceIntegrityPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_shielded_instance_integrity_policy_rest_flattened_error( + transport: str = "rest", +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4591,9 +5429,10 @@ def test_set_tags_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["tags_resource"] = compute.Tags(fingerprint="fingerprint_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4603,7 +5442,6 @@ def test_set_tags_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -4621,14 +5459,13 @@ def test_set_tags_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_tags(request) @@ -4639,7 +5476,6 @@ def test_set_tags_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -4657,18 +5493,41 @@ def test_set_tags_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_tags_rest_bad_request( + transport: str = "rest", request_type=compute.SetTagsInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["tags_resource"] = compute.Tags(fingerprint="fingerprint_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_tags(request) + + def test_set_tags_rest_from_dict(): test_set_tags_rest(request_type=dict) -def test_set_tags_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_tags_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -4676,40 +5535,45 @@ def test_set_tags_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
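Each new `*_rest_bad_request` test, such as `test_set_tags_rest_bad_request` above, mocks a 400 response and expects `core_exceptions.BadRequest`. A hedged sketch of the status-to-exception mapping those tests rely on, using `google.api_core.exceptions.from_http_response`; whether the generated REST transport calls exactly this helper internally is an assumption here:

```python
import requests
from google.api_core import exceptions as core_exceptions

# Fake a 400 the same way the tests do: a bare Response with a status code
# and an (unprepared) Request attached.
response = requests.Response()
response.status_code = 400
response.request = requests.Request()

# google-api-core maps HTTP status codes to typed exceptions; 400 becomes
# BadRequest, which is what pytest.raises(core_exceptions.BadRequest) checks.
exc = core_exceptions.from_http_response(response)
assert isinstance(exc, core_exceptions.BadRequest)
```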
- tags_resource = compute.Tags(fingerprint="fingerprint_value") - client.set_tags( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - tags_resource=tags_resource, + tags_resource=compute.Tags(fingerprint="fingerprint_value"), ) + mock_args.update(sample_request) + client.set_tags(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.Tags.to_json( - tags_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_tags_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/setTags" + % client.transport._host, + args[1], + ) + + +def test_set_tags_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4731,9 +5595,9 @@ def test_simulate_maintenance_event_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4743,7 +5607,6 @@ def test_simulate_maintenance_event_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -4761,14 +5624,13 @@ def test_simulate_maintenance_event_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.simulate_maintenance_event(request) @@ -4779,7 +5641,6 @@ def test_simulate_maintenance_event_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -4797,18 +5658,41 @@ def test_simulate_maintenance_event_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_simulate_maintenance_event_rest_bad_request( + transport: str = "rest", + request_type=compute.SimulateMaintenanceEventInstanceRequest, +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.simulate_maintenance_event(request) + + def test_simulate_maintenance_event_rest_from_dict(): test_simulate_maintenance_event_rest(request_type=dict) -def test_simulate_maintenance_event_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_simulate_maintenance_event_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
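The reordered "Wrap the value into a proper Response obj" blocks above all follow the same recipe: serialize the expected proto-plus message with `to_json()`, then plant the encoded bytes on `requests.Response._content` so the mocked `Session.request` returns it. A minimal sketch of that round trip (the operation name is an illustrative value):

```python
import requests
from google.cloud.compute_v1.types import compute

return_value = compute.Operation(name="operation-sample")

# Build the fake HTTP response the mocked Session.request hands back.
response_value = requests.Response()
response_value.status_code = 200
json_return_value = compute.Operation.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")

# The client-side parsing is the reverse: JSON body back into an Operation.
parsed = compute.Operation.from_json(response_value.content.decode("utf-8"))
assert parsed.name == "operation-sample"
```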
with mock.patch.object(Session, "request") as req: @@ -4816,31 +5700,42 @@ def test_simulate_maintenance_event_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.simulate_maintenance_event( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.simulate_maintenance_event(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/simulateMaintenanceEvent" + % client.transport._host, + args[1], + ) -def test_simulate_maintenance_event_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_simulate_maintenance_event_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4858,9 +5753,9 @@ def test_start_rest(transport: str = "rest", request_type=compute.StartInstanceR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4870,7 +5765,6 @@ def test_start_rest(transport: str = "rest", request_type=compute.StartInstanceR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -4888,14 +5782,13 @@ def test_start_rest(transport: str = "rest", request_type=compute.StartInstanceR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.start(request) @@ -4906,7 +5799,6 @@ def test_start_rest(transport: str = "rest", request_type=compute.StartInstanceR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -4924,18 +5816,40 @@ def test_start_rest(transport: str = "rest", request_type=compute.StartInstanceR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_start_rest_bad_request( + transport: str = "rest", request_type=compute.StartInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start(request) + + def test_start_rest_from_dict(): test_start_rest(request_type=dict) -def test_start_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_start_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -4943,31 +5857,42 @@ def test_start_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.start( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.start(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/start" + % client.transport._host, + args[1], + ) -def test_start_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_start_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -4987,9 +5912,20 @@ def test_start_with_encryption_key_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_start_with_encryption_key_request_resource" + ] = compute.InstancesStartWithEncryptionKeyRequest( + disks=[ + compute.CustomerEncryptionKeyProtectedDisk( + disk_encryption_key=compute.CustomerEncryptionKey( + kms_key_name="kms_key_name_value" + ) + ) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
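The new URL assertion unpacks `req.mock_calls[0]` and validates `args[1]`: since `requests.Session.request(method, url, ...)` receives the URL as its second positional argument, `args[1]` is the full request URL once `Session.request` has been patched. A small sketch of that unpacking with a plain `MagicMock`, using an assumed URL purely for illustration:

```python
from unittest import mock

req = mock.MagicMock()

# Simulate what the REST transport ends up doing through the patched
# Session.request: request(method, url, ...).
req(
    "POST",
    "https://compute.googleapis.com/compute/v1/projects/sample1"
    "/zones/sample2/instances/sample3/start",
    data="{}",
)

assert len(req.mock_calls) == 1
_, args, kwargs = req.mock_calls[0]
assert args[0] == "POST"
assert args[1].endswith("/instances/sample3/start")
```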
with mock.patch.object(Session, "request") as req: @@ -4999,7 +5935,6 @@ def test_start_with_encryption_key_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -5017,14 +5952,13 @@ def test_start_with_encryption_key_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.start_with_encryption_key(request) @@ -5035,7 +5969,6 @@ def test_start_with_encryption_key_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -5053,18 +5986,51 @@ def test_start_with_encryption_key_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_start_with_encryption_key_rest_bad_request( + transport: str = "rest", request_type=compute.StartWithEncryptionKeyInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init[ + "instances_start_with_encryption_key_request_resource" + ] = compute.InstancesStartWithEncryptionKeyRequest( + disks=[ + compute.CustomerEncryptionKeyProtectedDisk( + disk_encryption_key=compute.CustomerEncryptionKey( + kms_key_name="kms_key_name_value" + ) + ) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.start_with_encryption_key(request) + + def test_start_with_encryption_key_rest_from_dict(): test_start_with_encryption_key_rest(request_type=dict) -def test_start_with_encryption_key_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_start_with_encryption_key_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5072,48 +6038,53 @@ def test_start_with_encryption_key_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instances_start_with_encryption_key_request_resource = compute.InstancesStartWithEncryptionKeyRequest( - disks=[ - compute.CustomerEncryptionKeyProtectedDisk( - disk_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) - ) - ] - ) - client.start_with_encryption_key( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instances_start_with_encryption_key_request_resource=instances_start_with_encryption_key_request_resource, + instances_start_with_encryption_key_request_resource=compute.InstancesStartWithEncryptionKeyRequest( + disks=[ + compute.CustomerEncryptionKeyProtectedDisk( + disk_encryption_key=compute.CustomerEncryptionKey( + kms_key_name="kms_key_name_value" + ) + ) + ] + ), ) + mock_args.update(sample_request) + client.start_with_encryption_key(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.InstancesStartWithEncryptionKeyRequest.to_json( - instances_start_with_encryption_key_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_start_with_encryption_key_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/startWithEncryptionKey" + % client.transport._host, + args[1], + ) + + +def test_start_with_encryption_key_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5140,9 +6111,9 @@ def test_stop_rest(transport: str = "rest", request_type=compute.StopInstanceReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -5152,7 +6123,6 @@ def test_stop_rest(transport: str = "rest", request_type=compute.StopInstanceReq creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -5170,14 +6140,13 @@ def test_stop_rest(transport: str = "rest", request_type=compute.StopInstanceReq target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.stop(request) @@ -5188,7 +6157,6 @@ def test_stop_rest(transport: str = "rest", request_type=compute.StopInstanceReq assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -5206,18 +6174,40 @@ def test_stop_rest(transport: str = "rest", request_type=compute.StopInstanceReq assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_stop_rest_bad_request( + transport: str = "rest", request_type=compute.StopInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.stop(request) + + def test_stop_rest_from_dict(): test_stop_rest(request_type=dict) -def test_stop_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_stop_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5225,31 +6215,42 @@ def test_stop_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.stop( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", ) + mock_args.update(sample_request) + client.stop(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/stop" + % client.transport._host, + args[1], + ) -def test_stop_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_stop_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5269,9 +6270,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
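The `*_rest_flattened_error` tests above (now parametrized on `transport`) keep pinning down the long-standing GAPIC rule that a request object and flattened fields are mutually exclusive. A minimal sketch of that behaviour against the public client; the `ValueError` fires before any HTTP call is attempted:

```python
import pytest
from google.auth import credentials as ga_credentials
from google.cloud import compute_v1

client = compute_v1.InstancesClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest",
)

# Mixing a full request object with flattened keyword fields is rejected.
with pytest.raises(ValueError):
    client.stop(
        compute_v1.StopInstanceRequest(),
        project="project_value",
        zone="zone_value",
        instance="instance_value",
    )
```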
with mock.patch.object(Session, "request") as req: @@ -5281,9 +6285,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -5293,12 +6297,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -5306,42 +6338,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5364,9 +6401,14 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["instance_resource"] = compute.Instance( + advanced_machine_features=compute.AdvancedMachineFeatures( + enable_nested_virtualization=True + ) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5376,7 +6418,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -5394,14 +6435,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -5412,7 +6452,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -5430,18 +6469,45 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["instance_resource"] = compute.Instance( + advanced_machine_features=compute.AdvancedMachineFeatures( + enable_nested_virtualization=True + ) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
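As the `test_update_rest` hunks above show, body fields such as `instance_resource` are now populated inside `request_init` alongside the path fields. A minimal sketch of the nesting, again assuming only the proto-plus constructor behaviour:

```python
from google.cloud.compute_v1.types import compute

request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"}
# Nested proto-plus messages can be assigned directly as mapping values.
request_init["instance_resource"] = compute.Instance(
    advanced_machine_features=compute.AdvancedMachineFeatures(
        enable_nested_virtualization=True
    )
)

request = compute.UpdateInstanceRequest(request_init)
assert request.instance_resource.advanced_machine_features.enable_nested_virtualization
```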
with mock.patch.object(Session, "request") as req: @@ -5449,44 +6515,49 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_resource = compute.Instance( - advanced_machine_features=compute.AdvancedMachineFeatures( - enable_nested_virtualization=True - ) - ) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - instance_resource=instance_resource, + instance_resource=compute.Instance( + advanced_machine_features=compute.AdvancedMachineFeatures( + enable_nested_virtualization=True + ) + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.Instance.to_json( - instance_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5511,9 +6582,12 @@ def test_update_access_config_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["access_config_resource"] = compute.AccessConfig( + external_ipv6="external_ipv6_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5523,7 +6597,6 @@ def test_update_access_config_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -5541,14 +6614,13 @@ def test_update_access_config_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_access_config(request) @@ -5559,7 +6631,6 @@ def test_update_access_config_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -5577,18 +6648,43 @@ def test_update_access_config_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_access_config_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateAccessConfigInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["access_config_resource"] = compute.AccessConfig( + external_ipv6="external_ipv6_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_access_config(request) + + def test_update_access_config_rest_from_dict(): test_update_access_config_rest(request_type=dict) -def test_update_access_config_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_access_config_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5596,44 +6692,48 @@ def test_update_access_config_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - access_config_resource = compute.AccessConfig( - external_ipv6="external_ipv6_value" - ) - client.update_access_config( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", network_interface="network_interface_value", - access_config_resource=access_config_resource, + access_config_resource=compute.AccessConfig( + external_ipv6="external_ipv6_value" + ), ) + mock_args.update(sample_request) + client.update_access_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert "network_interface_value" in http_call[1] + str(body) + str(params) - assert compute.AccessConfig.to_json( - access_config_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_access_config_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateAccessConfig" + % client.transport._host, + args[1], + ) + + +def test_update_access_config_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5657,9 +6757,10 @@ def test_update_display_device_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["display_device_resource"] = compute.DisplayDevice(enable_display=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5669,7 +6770,6 @@ def test_update_display_device_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -5687,14 +6787,13 @@ def test_update_display_device_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_display_device(request) @@ -5705,7 +6804,6 @@ def test_update_display_device_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -5723,18 +6821,41 @@ def test_update_display_device_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_display_device_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateDisplayDeviceInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["display_device_resource"] = compute.DisplayDevice(enable_display=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_display_device(request) + + def test_update_display_device_rest_from_dict(): test_update_display_device_rest(request_type=dict) -def test_update_display_device_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_display_device_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5742,40 +6863,45 @@ def test_update_display_device_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - display_device_resource = compute.DisplayDevice(enable_display=True) - client.update_display_device( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - display_device_resource=display_device_resource, + display_device_resource=compute.DisplayDevice(enable_display=True), ) + mock_args.update(sample_request) + client.update_display_device(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.DisplayDevice.to_json( - display_device_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_display_device_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateDisplayDevice" + % client.transport._host, + args[1], + ) + + +def test_update_display_device_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5796,9 +6922,12 @@ def test_update_network_interface_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["network_interface_resource"] = compute.NetworkInterface( + access_configs=[compute.AccessConfig(external_ipv6="external_ipv6_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5808,7 +6937,6 @@ def test_update_network_interface_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -5826,14 +6954,13 @@ def test_update_network_interface_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_network_interface(request) @@ -5844,7 +6971,6 @@ def test_update_network_interface_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -5862,18 +6988,43 @@ def test_update_network_interface_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_network_interface_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateNetworkInterfaceInstanceRequest +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["network_interface_resource"] = compute.NetworkInterface( + access_configs=[compute.AccessConfig(external_ipv6="external_ipv6_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_network_interface(request) + + def test_update_network_interface_rest_from_dict(): test_update_network_interface_rest(request_type=dict) -def test_update_network_interface_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_network_interface_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5881,44 +7032,50 @@ def test_update_network_interface_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - network_interface_resource = compute.NetworkInterface( - access_configs=[compute.AccessConfig(external_ipv6="external_ipv6_value")] - ) - client.update_network_interface( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", network_interface="network_interface_value", - network_interface_resource=network_interface_resource, + network_interface_resource=compute.NetworkInterface( + access_configs=[ + compute.AccessConfig(external_ipv6="external_ipv6_value") + ] + ), ) + mock_args.update(sample_request) + client.update_network_interface(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert "network_interface_value" in http_call[1] + str(body) + str(params) - assert compute.NetworkInterface.to_json( - network_interface_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_network_interface_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateNetworkInterface" + % client.transport._host, + args[1], + ) + + +def test_update_network_interface_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -5945,9 +7102,12 @@ def test_update_shielded_instance_config_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["shielded_instance_config_resource"] = compute.ShieldedInstanceConfig( + enable_integrity_monitoring=True + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -5957,7 +7117,6 @@ def test_update_shielded_instance_config_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -5975,14 +7134,13 @@ def test_update_shielded_instance_config_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_shielded_instance_config(request) @@ -5993,7 +7151,6 @@ def test_update_shielded_instance_config_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -6011,18 +7168,44 @@ def test_update_shielded_instance_config_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_shielded_instance_config_rest_bad_request( + transport: str = "rest", + request_type=compute.UpdateShieldedInstanceConfigInstanceRequest, +): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} + request_init["shielded_instance_config_resource"] = compute.ShieldedInstanceConfig( + enable_integrity_monitoring=True + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_shielded_instance_config(request) + + def test_update_shielded_instance_config_rest_from_dict(): test_update_shielded_instance_config_rest(request_type=dict) -def test_update_shielded_instance_config_rest_flattened(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_shielded_instance_config_rest_flattened(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -6030,42 +7213,47 @@ def test_update_shielded_instance_config_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - shielded_instance_config_resource = compute.ShieldedInstanceConfig( - enable_integrity_monitoring=True - ) - client.update_shielded_instance_config( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", instance="instance_value", - shielded_instance_config_resource=shielded_instance_config_resource, + shielded_instance_config_resource=compute.ShieldedInstanceConfig( + enable_integrity_monitoring=True + ), ) + mock_args.update(sample_request) + client.update_shielded_instance_config(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "instance_value" in http_call[1] + str(body) + str(params) - assert compute.ShieldedInstanceConfig.to_json( - shielded_instance_config_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_shielded_instance_config_rest_flattened_error(): - client = InstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/instances/{instance}/updateShieldedInstanceConfig" + % client.transport._host, + args[1], + ) + + +def test_update_shielded_instance_config_rest_flattened_error(transport: str = "rest"): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -6199,8 +7387,10 @@ def test_instances_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_instances_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -6224,29 +7414,6 @@ def test_instances_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_instances_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.instances.transports.InstancesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InstancesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_instances_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -6258,7 +7425,6 @@ def test_instances_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_instances_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -6274,21 +7440,6 @@ def test_instances_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_instances_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InstancesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_instances_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -6435,3 +7586,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_interconnect_attachments.py b/tests/unit/gapic/compute_v1/test_interconnect_attachments.py index 5b689fac1..9c4d8409b 100644 --- a/tests/unit/gapic/compute_v1/test_interconnect_attachments.py +++ b/tests/unit/gapic/compute_v1/test_interconnect_attachments.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.interconnect_attachments import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.interconnect_attachments import pagers from google.cloud.compute_v1.services.interconnect_attachments import transports -from google.cloud.compute_v1.services.interconnect_attachments.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_interconnect_attachments_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_interconnect_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_interconnect_attachments_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_interconnect_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_interconnect_attachments_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_interconnect_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_interconnect_attachments_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_interconnect_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_interconnect_attachments_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env 
== "false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_interconnect_attachments_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_interconnect_attachments_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_interconnect_attachments_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_interconnect_attachments_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_interconnect_attachments_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_interconnect_attachments_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_interconnect_attachments_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_interconnect_attachments_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_interconnect_attachments_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -455,35 +448,27 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InterconnectAttachmentAggregatedList( id="id_value", - items={ - "key_value": compute.InterconnectAttachmentsScopedList( - interconnect_attachments=[ - compute.InterconnectAttachment(admin_enabled=True) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InterconnectAttachmentAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -491,27 +476,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.InterconnectAttachmentsScopedList( - interconnect_attachments=[ - compute.InterconnectAttachment(admin_enabled=True) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", + request_type=compute.AggregatedListInterconnectAttachmentsRequest, +): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): +def test_aggregated_list_rest_flattened(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -520,30 +521,37 @@ def test_aggregated_list_rest_flattened(): return_value = compute.InterconnectAttachmentAggregatedList() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InterconnectAttachmentAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/interconnectAttachments" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -555,13 +563,15 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = InterconnectAttachmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InterconnectAttachmentAggregatedList( @@ -599,10 +609,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.InterconnectAttachmentsScopedList) assert pager.get("h") is None @@ -620,7 +629,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.InterconnectAttachmentsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -632,9 +641,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -644,7 +657,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -662,14 +674,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -680,7 +691,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -698,19 +708,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteInterconnectAttachmentRequest +): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -719,34 +753,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", interconnect_attachment="interconnect_attachment_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "interconnect_attachment_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -767,9 +810,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -795,12 +842,6 @@ def test_get_rest( operational_status=compute.InterconnectAttachment.OperationalStatus.OS_ACTIVE, pairing_key="pairing_key_value", partner_asn=1181, - partner_metadata=compute.InterconnectAttachmentPartnerMetadata( - interconnect_name="interconnect_name_value" - ), - private_interconnect_info=compute.InterconnectAttachmentPrivateInfo( - tag8021q=632 - ), region="region_value", router="router_value", satisfies_pzs=True, @@ -811,9 +852,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectAttachment.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectAttachment.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -846,13 +887,6 @@ def test_get_rest( ) assert response.pairing_key == "pairing_key_value" assert response.partner_asn == 1181 - assert response.partner_metadata == compute.InterconnectAttachmentPartnerMetadata( - interconnect_name="interconnect_name_value" - ) - assert ( - response.private_interconnect_info - == compute.InterconnectAttachmentPrivateInfo(tag8021q=632) - ) assert response.region == "region_value" assert response.router == "router_value" assert response.satisfies_pzs is True @@ -862,13 +896,40 @@ def test_get_rest( assert response.vlan_tag8021q == 1160 +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInterconnectAttachmentRequest +): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -877,34 +938,43 @@ def test_get_rest_flattened(): return_value = compute.InterconnectAttachment() # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectAttachment.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectAttachment.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", interconnect_attachment="interconnect_attachment_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "interconnect_attachment_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -925,9 +995,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["interconnect_attachment_resource"] = compute.InterconnectAttachment( + admin_enabled=True + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -937,7 +1010,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -955,14 +1027,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -973,7 +1044,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -991,19 +1061,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertInterconnectAttachmentRequest +): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["interconnect_attachment_resource"] = compute.InterconnectAttachment( + admin_enabled=True + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1012,41 +1105,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- interconnect_attachment_resource = compute.InterconnectAttachment( - admin_enabled=True - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - interconnect_attachment_resource=interconnect_attachment_resource, + interconnect_attachment_resource=compute.InterconnectAttachment( + admin_enabled=True + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.InterconnectAttachment.to_json( - interconnect_attachment_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1069,26 +1162,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InterconnectAttachmentList( id="id_value", - items=[compute.InterconnectAttachment(admin_enabled=True)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectAttachmentList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectAttachmentList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1096,20 +1187,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.InterconnectAttachment(admin_enabled=True)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListInterconnectAttachmentsRequest +): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1118,31 +1230,35 @@ def test_list_rest_flattened(): return_value = compute.InterconnectAttachmentList() # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectAttachmentList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectAttachmentList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1155,13 +1271,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = InterconnectAttachmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InterconnectAttachmentList( @@ -1196,16 +1314,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InterconnectAttachment) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1217,9 +1334,16 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + request_init["interconnect_attachment_resource"] = compute.InterconnectAttachment( + admin_enabled=True + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1229,7 +1353,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1247,14 +1370,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1265,7 +1387,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1283,19 +1404,46 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchInterconnectAttachmentRequest +): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + request_init["interconnect_attachment_resource"] = compute.InterconnectAttachment( + admin_enabled=True + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1304,43 +1452,46 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- interconnect_attachment_resource = compute.InterconnectAttachment( - admin_enabled=True - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "interconnect_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", interconnect_attachment="interconnect_attachment_value", - interconnect_attachment_resource=interconnect_attachment_resource, + interconnect_attachment_resource=compute.InterconnectAttachment( + admin_enabled=True + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "interconnect_attachment_value" in http_call[1] + str(body) + str(params) - assert compute.InterconnectAttachment.to_json( - interconnect_attachment_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/interconnectAttachments/{interconnect_attachment}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = InterconnectAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1440,8 +1591,10 @@ def test_interconnect_attachments_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_interconnect_attachments_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1465,29 +1618,6 @@ def test_interconnect_attachments_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_interconnect_attachments_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.interconnect_attachments.transports.InterconnectAttachmentsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InterconnectAttachmentsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_interconnect_attachments_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1499,7 +1629,6 @@ def test_interconnect_attachments_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_interconnect_attachments_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1515,21 +1644,6 @@ def test_interconnect_attachments_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_interconnect_attachments_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InterconnectAttachmentsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_interconnect_attachments_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1676,3 +1790,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InterconnectAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_interconnect_locations.py b/tests/unit/gapic/compute_v1/test_interconnect_locations.py index 6a16bee3b..b777ee476 100644 --- a/tests/unit/gapic/compute_v1/test_interconnect_locations.py +++ b/tests/unit/gapic/compute_v1/test_interconnect_locations.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.interconnect_locations import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.interconnect_locations import pagers from google.cloud.compute_v1.services.interconnect_locations import transports -from google.cloud.compute_v1.services.interconnect_locations.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_interconnect_locations_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_interconnect_locations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_interconnect_locations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_interconnect_locations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_interconnect_locations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_interconnect_locations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_interconnect_locations_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_interconnect_locations_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_interconnect_locations_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": 
expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_interconnect_locations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_interconnect_locations_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_interconnect_locations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_interconnect_locations_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_interconnect_locations_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_interconnect_locations_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_interconnect_locations_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_interconnect_locations_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_interconnect_locations_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -454,9 +447,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect_location": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -474,16 +467,15 @@ def test_get_rest( kind="kind_value", name="name_value", peeringdb_facility_id="peeringdb_facility_id_value", - region_infos=[compute.InterconnectLocationRegionInfo(expected_rtt_ms=1610)], self_link="self_link_value", status=compute.InterconnectLocation.Status.AVAILABLE, supports_pzs=True, ) # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectLocation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectLocation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -504,21 +496,41 @@ def test_get_rest( assert response.kind == "kind_value" assert response.name == "name_value" assert response.peeringdb_facility_id == "peeringdb_facility_id_value" - assert response.region_infos == [ - compute.InterconnectLocationRegionInfo(expected_rtt_ms=1610) - ] assert response.self_link == "self_link_value" assert response.status == compute.InterconnectLocation.Status.AVAILABLE assert response.supports_pzs is True +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInterconnectLocationRequest +): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect_location": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = InterconnectLocationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -527,32 +539,38 @@ def test_get_rest_flattened(): return_value = compute.InterconnectLocation() # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectLocation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectLocation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "interconnect_location": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", interconnect_location="interconnect_location_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "interconnect_location_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnectLocations/{interconnect_location}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = InterconnectLocationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -572,26 +590,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InterconnectLocationList( id="id_value", - items=[compute.InterconnectLocation(address="address_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectLocationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectLocationList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -599,20 +615,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.InterconnectLocation(address="address_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListInterconnectLocationsRequest +): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = InterconnectLocationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -621,28 +658,35 @@ def test_list_rest_flattened(): return_value = compute.InterconnectLocationList() # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectLocationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectLocationList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnectLocations" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = InterconnectLocationsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -653,13 +697,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = InterconnectLocationsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InterconnectLocationList( @@ -689,16 +735,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InterconnectLocation) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -782,8 +827,10 @@ def test_interconnect_locations_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_interconnect_locations_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -808,30 +855,6 @@ def test_interconnect_locations_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_interconnect_locations_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.interconnect_locations.transports.InterconnectLocationsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InterconnectLocationsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_interconnect_locations_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -843,7 +866,6 @@ def test_interconnect_locations_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_interconnect_locations_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -860,22 +882,6 @@ def test_interconnect_locations_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_interconnect_locations_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InterconnectLocationsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_interconnect_locations_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1022,3 +1028,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InterconnectLocationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_interconnects.py b/tests/unit/gapic/compute_v1/test_interconnects.py index 53937d7af..e87bc4835 100644 --- a/tests/unit/gapic/compute_v1/test_interconnects.py +++ b/tests/unit/gapic/compute_v1/test_interconnects.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.interconnects import InterconnectsClient from google.cloud.compute_v1.services.interconnects import pagers from google.cloud.compute_v1.services.interconnects import transports -from google.cloud.compute_v1.services.interconnects.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -194,7 +178,7 @@ def test_interconnects_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -203,6 +187,7 @@ def test_interconnects_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -210,7 +195,7 @@ def test_interconnects_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +204,7 @@ def test_interconnects_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +212,7 @@ def test_interconnects_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +221,7 @@ def test_interconnects_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -254,7 +241,7 @@ def test_interconnects_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -263,6 +250,7 @@ def test_interconnects_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -295,7 +283,7 @@ def test_interconnects_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -312,6 +300,7 @@ def 
test_interconnects_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -336,7 +325,7 @@ def test_interconnects_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -345,6 +334,7 @@ def test_interconnects_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -357,7 +347,7 @@ def test_interconnects_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -366,6 +356,7 @@ def test_interconnects_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -380,7 +371,7 @@ def test_interconnects_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -389,6 +380,7 @@ def test_interconnects_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -403,7 +395,7 @@ def test_interconnects_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -412,6 +404,7 @@ def test_interconnects_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -422,9 +415,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -434,7 +427,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -452,14 +444,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -470,7 +461,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -488,18 +478,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteInterconnectRequest +): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -507,30 +519,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", interconnect="interconnect_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "interconnect": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", interconnect="interconnect_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "interconnect_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -547,28 +565,18 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetInterconnectR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Interconnect( admin_enabled=True, - circuit_infos=[ - compute.InterconnectCircuitInfo( - customer_demarc_id="customer_demarc_id_value" - ) - ], creation_timestamp="creation_timestamp_value", customer_name="customer_name_value", description="description_value", - expected_outages=[ - compute.InterconnectOutageNotification( - affected_circuits=["affected_circuits_value"] - ) - ], google_ip_address="google_ip_address_value", google_reference_id="google_reference_id_value", id=205, @@ -589,9 +597,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetInterconnectR ) # Wrap the value into a proper Response obj - json_return_value = compute.Interconnect.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Interconnect.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -599,17 +607,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetInterconnectR # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Interconnect) assert response.admin_enabled is True - assert response.circuit_infos == [ - compute.InterconnectCircuitInfo(customer_demarc_id="customer_demarc_id_value") - ] assert response.creation_timestamp == "creation_timestamp_value" assert response.customer_name == "customer_name_value" assert response.description == "description_value" - assert response.expected_outages == [ - compute.InterconnectOutageNotification( - affected_circuits=["affected_circuits_value"] - ) - ] assert response.google_ip_address == "google_ip_address_value" assert response.google_reference_id == "google_reference_id_value" assert response.id == 205 @@ -633,12 +633,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetInterconnectR assert response.state == compute.Interconnect.State.ACTIVE +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetInterconnectRequest +): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -646,30 +671,36 @@ def test_get_rest_flattened(): return_value = compute.Interconnect() # Wrap the value into a proper Response obj - json_return_value = compute.Interconnect.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Interconnect.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", interconnect="interconnect_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "interconnect": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", interconnect="interconnect_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "interconnect_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -688,48 +719,60 @@ def test_get_diagnostics_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.InterconnectsGetDiagnosticsResponse( - result=compute.InterconnectDiagnostics( - arp_caches=[ - compute.InterconnectDiagnosticsARPEntry( - ip_address="ip_address_value" - ) - ] - ), - ) + return_value = compute.InterconnectsGetDiagnosticsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InterconnectsGetDiagnosticsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_diagnostics(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.InterconnectsGetDiagnosticsResponse) - assert response.result == compute.InterconnectDiagnostics( - arp_caches=[ - compute.InterconnectDiagnosticsARPEntry(ip_address="ip_address_value") - ] + + +def test_get_diagnostics_rest_bad_request( + transport: str = "rest", request_type=compute.GetDiagnosticsInterconnectRequest +): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_diagnostics(request) + def test_get_diagnostics_rest_from_dict(): test_get_diagnostics_rest(request_type=dict) -def test_get_diagnostics_rest_flattened(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_diagnostics_rest_flattened(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -737,32 +780,38 @@ def test_get_diagnostics_rest_flattened(): return_value = compute.InterconnectsGetDiagnosticsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.InterconnectsGetDiagnosticsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_diagnostics( - project="project_value", interconnect="interconnect_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "interconnect": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", interconnect="interconnect_value",) + mock_args.update(sample_request) + client.get_diagnostics(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "interconnect_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnects/{interconnect}/getDiagnostics" + % client.transport._host, + args[1], + ) -def test_get_diagnostics_rest_flattened_error(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_diagnostics_rest_flattened_error(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -781,9 +830,10 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["interconnect_resource"] = compute.Interconnect(admin_enabled=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -793,7 +843,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -811,14 +860,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -829,7 +877,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -847,18 +894,41 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertInterconnectRequest +): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["interconnect_resource"] = compute.Interconnect(admin_enabled=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -866,35 +936,39 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- interconnect_resource = compute.Interconnect(admin_enabled=True) - client.insert( - project="project_value", interconnect_resource=interconnect_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + interconnect_resource=compute.Interconnect(admin_enabled=True), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.Interconnect.to_json( - interconnect_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnects" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -913,26 +987,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.InterconnectList( id="id_value", - items=[compute.Interconnect(admin_enabled=True)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -940,19 +1012,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.Interconnect(admin_enabled=True)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListInterconnectsRequest +): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -960,27 +1055,36 @@ def test_list_rest_flattened(): return_value = compute.InterconnectList() # Wrap the value into a proper Response obj - json_return_value = compute.InterconnectList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InterconnectList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnects" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -990,11 +1094,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.InterconnectList( @@ -1024,16 +1130,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Interconnect) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1045,9 +1150,10 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request_init["interconnect_resource"] = compute.Interconnect(admin_enabled=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1057,7 +1163,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1075,14 +1180,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1093,7 +1197,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1111,18 +1214,41 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchInterconnectRequest +): + client = InterconnectsClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "interconnect": "sample2"} + request_init["interconnect_resource"] = compute.Interconnect(admin_enabled=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1130,38 +1256,40 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - interconnect_resource = compute.Interconnect(admin_enabled=True) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "interconnect": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", interconnect="interconnect_value", - interconnect_resource=interconnect_resource, + interconnect_resource=compute.Interconnect(admin_enabled=True), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "interconnect_value" in http_call[1] + str(body) + str(params) - assert compute.Interconnect.to_json( - interconnect_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = InterconnectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/interconnects/{interconnect}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1255,8 +1383,10 @@ def test_interconnects_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_interconnects_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1280,29 +1410,6 @@ def test_interconnects_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_interconnects_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.interconnects.transports.InterconnectsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.InterconnectsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_interconnects_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1314,7 +1421,6 @@ def test_interconnects_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_interconnects_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1330,21 +1436,6 @@ def test_interconnects_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_interconnects_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - InterconnectsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_interconnects_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1491,3 +1582,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InterconnectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_license_codes.py b/tests/unit/gapic/compute_v1/test_license_codes.py index 99d81a433..3bb1251b1 100644 --- a/tests/unit/gapic/compute_v1/test_license_codes.py +++ b/tests/unit/gapic/compute_v1/test_license_codes.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,32 +31,16 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.license_codes import LicenseCodesClient from google.cloud.compute_v1.services.license_codes import transports -from google.cloud.compute_v1.services.license_codes.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -188,7 +172,7 @@ def test_license_codes_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -197,6 +181,7 @@ def test_license_codes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -204,7 +189,7 @@ def test_license_codes_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +198,7 @@ def test_license_codes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +206,7 @@ def test_license_codes_client_client_options( 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +215,7 @@ def test_license_codes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -248,7 +235,7 @@ def test_license_codes_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -257,6 +244,7 @@ def test_license_codes_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -287,7 +275,7 @@ def test_license_codes_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -304,6 +292,7 @@ def test_license_codes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -328,7 +317,7 @@ def test_license_codes_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -337,6 +326,7 @@ def test_license_codes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -349,7 +339,7 @@ def test_license_codes_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -358,6 +348,7 @@ def test_license_codes_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -372,7 +363,7 @@ def test_license_codes_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -381,6 +372,7 @@ def test_license_codes_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -395,7 +387,7 @@ def test_license_codes_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -404,6 +396,7 @@ def test_license_codes_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -412,9 +405,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetLicenseCodeRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "license_code": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -424,9 +417,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetLicenseCodeRe description="description_value", id=205, kind="kind_value", - license_alias=[ - compute.LicenseCodeLicenseAlias(description="description_value") - ], name="name_value", self_link="self_link_value", state=compute.LicenseCode.State.DISABLED, @@ -434,9 +424,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetLicenseCodeRe ) # Wrap the value into a proper Response obj - json_return_value = compute.LicenseCode.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.LicenseCode.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -447,21 +437,43 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetLicenseCodeRe assert response.description == "description_value" assert response.id == 205 assert response.kind == "kind_value" - assert response.license_alias == [ - compute.LicenseCodeLicenseAlias(description="description_value") - ] assert response.name == "name_value" assert response.self_link == "self_link_value" assert response.state == compute.LicenseCode.State.DISABLED assert response.transferable is True +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetLicenseCodeRequest +): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "license_code": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = LicenseCodesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -469,30 +481,36 @@ def test_get_rest_flattened(): return_value = compute.LicenseCode() # Wrap the value into a proper Response obj - json_return_value = compute.LicenseCode.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.LicenseCode.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( - project="project_value", license_code="license_code_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "license_code": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", license_code="license_code_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "license_code_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenseCodes/{license_code}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = LicenseCodesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -511,9 +529,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -523,9 +544,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -535,12 +556,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsLicenseCodeRequest +): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = LicenseCodesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -548,40 +597,42 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenseCodes/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) -def test_test_iam_permissions_rest_flattened_error(): - client = LicenseCodesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -673,8 +724,10 @@ def test_license_codes_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_license_codes_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -699,30 +752,6 @@ def test_license_codes_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_license_codes_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.license_codes.transports.LicenseCodesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LicenseCodesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_license_codes_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -734,7 +763,6 @@ def test_license_codes_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_license_codes_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -751,22 +779,6 @@ def test_license_codes_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_license_codes_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LicenseCodesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_license_codes_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -913,3 +925,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = LicenseCodesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_licenses.py b/tests/unit/gapic/compute_v1/test_licenses.py index cca708c67..1bf91be1e 100644 --- a/tests/unit/gapic/compute_v1/test_licenses.py +++ b/tests/unit/gapic/compute_v1/test_licenses.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.licenses import LicensesClient from google.cloud.compute_v1.services.licenses import pagers from google.cloud.compute_v1.services.licenses import transports -from google.cloud.compute_v1.services.licenses.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,7 +169,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,6 +178,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -201,7 +186,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -210,6 +195,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -217,7 +203,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -226,6 +212,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,7 +232,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,6 +241,7 @@ def test_licenses_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -284,7 +272,7 @@ def test_licenses_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -301,6 +289,7 @@ def test_licenses_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -325,7 +314,7 @@ def test_licenses_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -334,6 +323,7 @@ def test_licenses_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -346,7 +336,7 @@ def test_licenses_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +345,7 @@ def test_licenses_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -369,7 +360,7 @@ def test_licenses_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +369,7 @@ def test_licenses_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +384,7 @@ def test_licenses_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -401,6 +393,7 @@ def test_licenses_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -411,9 +404,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "license_": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -423,7 +416,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -441,14 +433,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -459,7 +450,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -477,18 +467,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteLicenseRequest +): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "license_": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -496,30 +508,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", license_="license__value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "license_": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", license_="license__value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "license__value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenses/{license_}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -536,9 +554,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetLicenseReques credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "license_": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -551,17 +569,14 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetLicenseReques kind="kind_value", license_code=1245, name="name_value", - resource_requirements=compute.LicenseResourceRequirements( - min_guest_cpu_count=2042 - ), self_link="self_link_value", transferable=True, ) # Wrap the value into a proper Response obj - json_return_value = compute.License.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.License.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -575,19 +590,41 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetLicenseReques assert response.kind == "kind_value" assert response.license_code == 1245 assert response.name == "name_value" - assert response.resource_requirements == compute.LicenseResourceRequirements( - min_guest_cpu_count=2042 - ) assert response.self_link == "self_link_value" assert response.transferable is True +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetLicenseRequest +): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "license_": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -595,30 +632,36 @@ def test_get_rest_flattened(): return_value = compute.License() # Wrap the value into a proper Response obj - json_return_value = compute.License.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.License.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", license_="license__value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "license_": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", license_="license__value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "license__value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenses/{license_}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -637,60 +680,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyLicenseRequest +): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -698,30 +742,36 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_iam_policy( - project="project_value", resource="resource_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", resource="resource_value",) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenses/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -740,9 +790,10 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["license_resource"] = compute.License(charges_use_fee=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -752,7 +803,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -770,14 +820,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -788,7 +837,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -806,18 +854,41 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertLicenseRequest +): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["license_resource"] = compute.License(charges_use_fee=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -825,35 +896,39 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- license_resource = compute.License(charges_use_fee=True) - client.insert( - project="project_value", license_resource=license_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + license_resource=compute.License(charges_use_fee=True), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.License.to_json( - license_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenses" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -870,25 +945,23 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListLicensesReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.LicensesListResponse( id="id_value", - items=[compute.License(charges_use_fee=True)], next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.LicensesListResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.LicensesListResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -896,18 +969,41 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListLicensesReq # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.License(charges_use_fee=True)] assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListLicensesRequest +): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -915,27 +1011,36 @@ def test_list_rest_flattened(): return_value = compute.LicensesListResponse() # Wrap the value into a proper Response obj - json_return_value = compute.LicensesListResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.LicensesListResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenses" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -945,11 +1050,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.LicensesListResponse( @@ -975,16 +1082,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.License) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -996,60 +1102,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyLicenseRequest +): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1057,40 +1170,42 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_set_policy_request_resource = compute.GlobalSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_policy_request_resource=global_set_policy_request_resource, + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetPolicyRequest.to_json( - global_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenses/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1112,9 +1227,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1124,9 +1242,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1136,12 +1254,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsLicenseRequest +): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
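+    # The REST transport translates the mocked 400 status below into
+    # core_exceptions.BadRequest, which pytest.raises() asserts.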
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1149,40 +1295,42 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = LicensesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/licenses/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1279,8 +1427,10 @@ def test_licenses_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_licenses_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1304,29 +1454,6 @@ def test_licenses_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_licenses_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.licenses.transports.LicensesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.LicensesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_licenses_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1338,7 +1465,6 @@ def test_licenses_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_licenses_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1354,21 +1480,6 @@ def test_licenses_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_licenses_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - LicensesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_licenses_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1515,3 +1626,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = LicensesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
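+        # Exiting the `with client:` block below is expected to close the transport.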
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_machine_types.py b/tests/unit/gapic/compute_v1/test_machine_types.py index 40e2f7fbd..6c7bdc982 100644 --- a/tests/unit/gapic/compute_v1/test_machine_types.py +++ b/tests/unit/gapic/compute_v1/test_machine_types.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.machine_types import MachineTypesClient from google.cloud.compute_v1.services.machine_types import pagers from google.cloud.compute_v1.services.machine_types import transports -from google.cloud.compute_v1.services.machine_types.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_machine_types_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_machine_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_machine_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_machine_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ 
-221,7 +207,7 @@ def test_machine_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_machine_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_machine_types_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_machine_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_machine_types_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def test_machine_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_machine_types_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_machine_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -350,7 +340,7 @@ def test_machine_types_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_machine_types_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_machine_types_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_machine_types_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_machine_types_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_machine_types_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,37 +408,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.MachineTypeAggregatedList( id="id_value", - items={ - "key_value": compute.MachineTypesScopedList( - machine_types=[ - compute.MachineType( - accelerators=[ - compute.Accelerators(guest_accelerator_count=2452) - ] - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.MachineTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.MachineTypeAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -453,28 +434,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.MachineTypesScopedList( - machine_types=[ - compute.MachineType( - accelerators=[compute.Accelerators(guest_accelerator_count=2452)] - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListMachineTypesRequest +): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -482,27 +478,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.MachineTypeAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.MachineTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.MachineTypeAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/machineTypes" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -512,11 +517,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.MachineTypeAggregatedList( @@ -549,10 +556,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.MachineTypesScopedList) assert pager.get("h") is None @@ -570,7 +576,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.MachineTypesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -580,17 +586,15 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetMachineTypeRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "machine_type": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.MachineType( - accelerators=[compute.Accelerators(guest_accelerator_count=2452)], creation_timestamp="creation_timestamp_value", - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", guest_cpus=1090, id=205, @@ -601,24 +605,21 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetMachineTypeRe maximum_persistent_disks_size_gb=3437, memory_mb=967, name="name_value", - scratch_disks=[compute.ScratchDisks(disk_gb=723)], self_link="self_link_value", zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.MachineType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.MachineType.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.MachineType) - assert response.accelerators == [compute.Accelerators(guest_accelerator_count=2452)] assert response.creation_timestamp == "creation_timestamp_value" - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.guest_cpus == 1090 assert response.id == 205 @@ -629,17 +630,41 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetMachineTypeRe assert response.maximum_persistent_disks_size_gb == 3437 assert response.memory_mb == 967 assert response.name == "name_value" - assert response.scratch_disks == [compute.ScratchDisks(disk_gb=723)] assert response.self_link == "self_link_value" assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetMachineTypeRequest +): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "machine_type": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -647,33 +672,44 @@ def test_get_rest_flattened(): return_value = compute.MachineType() # Wrap the value into a proper Response obj - json_return_value = compute.MachineType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.MachineType.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "machine_type": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", machine_type="machine_type_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "machine_type_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/machineTypes/{machine_type}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -693,30 +729,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.MachineTypeList( id="id_value", - items=[ - compute.MachineType( - accelerators=[compute.Accelerators(guest_accelerator_count=2452)] - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.MachineTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.MachineTypeList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -724,23 +754,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.MachineType( - accelerators=[compute.Accelerators(guest_accelerator_count=2452)] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListMachineTypesRequest +): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -748,30 +797,36 @@ def test_list_rest_flattened(): return_value = compute.MachineTypeList() # Wrap the value into a proper Response obj - json_return_value = compute.MachineTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.MachineTypeList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/machineTypes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -783,11 +838,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = MachineTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.MachineTypeList( @@ -817,16 +874,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.MachineType) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -909,8 +965,10 @@ def test_machine_types_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_machine_types_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -935,30 +993,6 @@ def test_machine_types_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_machine_types_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.machine_types.transports.MachineTypesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.MachineTypesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_machine_types_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -970,7 +1004,6 @@ def test_machine_types_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_machine_types_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -987,22 +1020,6 @@ def test_machine_types_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_machine_types_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - MachineTypesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_machine_types_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1149,3 +1166,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = MachineTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py b/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py index 8aa7a7793..68ecafc95 100644 --- a/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py +++ b/tests/unit/gapic/compute_v1/test_network_endpoint_groups.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.network_endpoint_groups import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.network_endpoint_groups import pagers from google.cloud.compute_v1.services.network_endpoint_groups import transports -from google.cloud.compute_v1.services.network_endpoint_groups.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_network_endpoint_groups_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_network_endpoint_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_network_endpoint_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_network_endpoint_groups_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_network_endpoint_groups_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == 
"false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_network_endpoint_groups_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_network_endpoint_groups_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_network_endpoint_groups_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_network_endpoint_groups_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_network_endpoint_groups_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_network_endpoint_groups_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -455,37 +448,27 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NetworkEndpointGroupAggregatedList( id="id_value", - items={ - "key_value": compute.NetworkEndpointGroupsScopedList( - network_endpoint_groups=[ - compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"} - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworkEndpointGroupAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -493,27 +476,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.NetworkEndpointGroupsScopedList( - network_endpoint_groups=[ - compute.NetworkEndpointGroup(annotations={"key_value": "value_value"}) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", + request_type=compute.AggregatedListNetworkEndpointGroupsRequest, +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): +def test_aggregated_list_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -522,30 +521,37 @@ def test_aggregated_list_rest_flattened(): return_value = compute.NetworkEndpointGroupAggregatedList() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworkEndpointGroupAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
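Note: every new *_rest_bad_request test follows the same pattern: mock the underlying requests.Session, return a 400, and expect google.api_core to surface it as BadRequest. A hedged, self-contained sketch of that pattern; method and client names mirror the tests above, and unittest.mock is used in place of the standalone mock package:

from unittest import mock

import pytest
from requests import Request, Response
from requests.sessions import Session

from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from google.cloud import compute_v1


def test_bad_request_pattern():
    client = compute_v1.NetworkEndpointGroupsClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        # Fake a 400 response; the REST transport surfaces it as BadRequest.
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.aggregated_list({"project": "sample1"})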
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/networkEndpointGroups" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -557,13 +563,15 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = NetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NetworkEndpointGroupAggregatedList( @@ -601,10 +609,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.NetworkEndpointGroupsScopedList) assert pager.get("h") is None @@ -622,7 +629,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.NetworkEndpointGroupsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -635,9 +642,20 @@ def test_attach_network_endpoints_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ] = compute.NetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
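Note: the flattened-call tests now assert the transcoded URL with google.api_core.path_template.validate instead of substring checks on the request body. A small sketch of that helper on its own; the host below is an assumption for illustration only:

from google.api_core import path_template

host = "compute.googleapis.com"  # assumed host, for illustration only
url = "https://%s/compute/v1/projects/sample1/aggregated/networkEndpointGroups" % host

# validate() returns True when the concrete URL matches the URI template,
# with {project} standing in for a single path segment.
assert path_template.validate(
    "https://%s/compute/v1/projects/{project}/aggregated/networkEndpointGroups" % host,
    url,
)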
with mock.patch.object(Session, "request") as req: @@ -647,7 +665,6 @@ def test_attach_network_endpoints_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -665,14 +682,13 @@ def test_attach_network_endpoints_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.attach_network_endpoints(request) @@ -683,7 +699,6 @@ def test_attach_network_endpoints_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -701,19 +716,51 @@ def test_attach_network_endpoints_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_attach_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.AttachNetworkEndpointsNetworkEndpointGroupRequest, +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request_init[ + "network_endpoint_groups_attach_endpoints_request_resource" + ] = compute.NetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.attach_network_endpoints(request) + + def test_attach_network_endpoints_rest_from_dict(): test_attach_network_endpoints_rest(request_type=dict) -def test_attach_network_endpoints_rest_flattened(): +def test_attach_network_endpoints_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
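Note: for context, the flattened signature exercised by the attach_network_endpoints tests corresponds to application code roughly as follows. This is a usage sketch only: the project, zone, and NEG names are placeholders, the endpoint payload mirrors the tests' placeholder values, and a real call needs valid credentials:

from google.cloud import compute_v1

client = compute_v1.NetworkEndpointGroupsClient()  # uses Application Default Credentials
operation = client.attach_network_endpoints(
    project="my-project",             # placeholder
    zone="us-central1-a",             # placeholder
    network_endpoint_group="my-neg",  # placeholder
    network_endpoint_groups_attach_endpoints_request_resource=compute_v1.NetworkEndpointGroupsAttachEndpointsRequest(
        network_endpoints=[
            compute_v1.NetworkEndpoint(annotations={"key": "value"})
        ]
    ),
)
print(operation.name)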
@@ -722,45 +769,48 @@ def test_attach_network_endpoints_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - network_endpoint_groups_attach_endpoints_request_resource = compute.NetworkEndpointGroupsAttachEndpointsRequest( - network_endpoints=[ - compute.NetworkEndpoint(annotations={"key_value": "value_value"}) - ] - ) - client.attach_network_endpoints( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", network_endpoint_group="network_endpoint_group_value", - network_endpoint_groups_attach_endpoints_request_resource=network_endpoint_groups_attach_endpoints_request_resource, + network_endpoint_groups_attach_endpoints_request_resource=compute.NetworkEndpointGroupsAttachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) + mock_args.update(sample_request) + client.attach_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) - assert compute.NetworkEndpointGroupsAttachEndpointsRequest.to_json( - network_endpoint_groups_attach_endpoints_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_attach_network_endpoints_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/attachNetworkEndpoints" + % client.transport._host, + args[1], + ) + + +def test_attach_network_endpoints_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -786,9 +836,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -798,7 +852,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -816,14 +869,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -834,7 +886,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -852,19 +903,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteNetworkEndpointGroupRequest +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -873,34 +948,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", network_endpoint_group="network_endpoint_group_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -922,9 +1006,20 @@ def test_detach_network_endpoints_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ] = compute.NetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -934,7 +1029,6 @@ def test_detach_network_endpoints_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -952,14 +1046,13 @@ def test_detach_network_endpoints_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.detach_network_endpoints(request) @@ -970,7 +1063,6 @@ def test_detach_network_endpoints_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -988,19 +1080,51 @@ def test_detach_network_endpoints_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_detach_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.DetachNetworkEndpointsNetworkEndpointGroupRequest, +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request_init[ + "network_endpoint_groups_detach_endpoints_request_resource" + ] = compute.NetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.detach_network_endpoints(request) + + def test_detach_network_endpoints_rest_from_dict(): test_detach_network_endpoints_rest(request_type=dict) -def test_detach_network_endpoints_rest_flattened(): +def test_detach_network_endpoints_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1009,45 +1133,48 @@ def test_detach_network_endpoints_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - network_endpoint_groups_detach_endpoints_request_resource = compute.NetworkEndpointGroupsDetachEndpointsRequest( - network_endpoints=[ - compute.NetworkEndpoint(annotations={"key_value": "value_value"}) - ] - ) - client.detach_network_endpoints( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", network_endpoint_group="network_endpoint_group_value", - network_endpoint_groups_detach_endpoints_request_resource=network_endpoint_groups_detach_endpoints_request_resource, + network_endpoint_groups_detach_endpoints_request_resource=compute.NetworkEndpointGroupsDetachEndpointsRequest( + network_endpoints=[ + compute.NetworkEndpoint(annotations={"key_value": "value_value"}) + ] + ), ) + mock_args.update(sample_request) + client.detach_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) - assert compute.NetworkEndpointGroupsDetachEndpointsRequest.to_json( - network_endpoint_groups_detach_endpoints_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_detach_network_endpoints_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/detachNetworkEndpoints" + % client.transport._host, + args[1], + ) + + +def test_detach_network_endpoints_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1073,20 +1200,18 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"}, - app_engine=compute.NetworkEndpointGroupAppEngine(service="service_value"), - cloud_function=compute.NetworkEndpointGroupCloudFunction( - function="function_value" - ), - cloud_run=compute.NetworkEndpointGroupCloudRun(service="service_value"), creation_timestamp="creation_timestamp_value", default_port=1289, description="description_value", @@ -1103,25 +1228,15 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.NetworkEndpointGroup) - assert response.annotations == {"key_value": "value_value"} - assert response.app_engine == compute.NetworkEndpointGroupAppEngine( - service="service_value" - ) - assert response.cloud_function == compute.NetworkEndpointGroupCloudFunction( - function="function_value" - ) - assert response.cloud_run == compute.NetworkEndpointGroupCloudRun( - service="service_value" - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.default_port == 1289 assert response.description == "description_value" @@ -1140,13 +1255,40 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetNetworkEndpointGroupRequest +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1155,34 +1297,43 @@ def test_get_rest_flattened(): return_value = compute.NetworkEndpointGroup() # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroup.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", network_endpoint_group="network_endpoint_group_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1203,9 +1354,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["network_endpoint_group_resource"] = compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1215,7 +1369,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1233,14 +1386,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1251,7 +1403,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1269,19 +1420,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertNetworkEndpointGroupRequest +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["network_endpoint_group_resource"] = compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1290,41 +1464,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- network_endpoint_group_resource = compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"} - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - network_endpoint_group_resource=network_endpoint_group_resource, + network_endpoint_group_resource=compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.NetworkEndpointGroup.to_json( - network_endpoint_group_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1347,28 +1521,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NetworkEndpointGroupList( id="id_value", - items=[ - compute.NetworkEndpointGroup(annotations={"key_value": "value_value"}) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1376,22 +1546,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.NetworkEndpointGroup(annotations={"key_value": "value_value"}) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListNetworkEndpointGroupsRequest +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1400,31 +1589,35 @@ def test_list_rest_flattened(): return_value = compute.NetworkEndpointGroupList() # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1437,13 +1630,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = NetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NetworkEndpointGroupList( @@ -1473,16 +1668,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NetworkEndpointGroup) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1495,37 +1689,32 @@ def test_list_network_endpoints_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request_init[ + "network_endpoint_groups_list_endpoints_request_resource" + ] = compute.NetworkEndpointGroupsListEndpointsRequest( + health_status=compute.NetworkEndpointGroupsListEndpointsRequest.HealthStatus.SHOW + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
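Note: the renamed test_list_rest_pager drives the pager with a sample request; in application code the same pager is simply iterated. A hedged usage sketch, with placeholder project and zone and Application Default Credentials assumed:

from google.cloud import compute_v1

client = compute_v1.NetworkEndpointGroupsClient()
# The ListPager yields compute_v1.NetworkEndpointGroup items and follows
# next_page_token across pages transparently.
for neg in client.list(project="my-project", zone="us-central1-a"):
    print(neg.name)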
return_value = compute.NetworkEndpointGroupsListNetworkEndpoints( - id="id_value", - items=[ - compute.NetworkEndpointWithHealthStatus( - healths=[ - compute.HealthStatusForNetworkEndpoint( - backend_service=compute.BackendServiceReference( - backend_service="backend_service_value" - ) - ) - ] - ) - ], - kind="kind_value", - next_page_token="next_page_token_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), + id="id_value", kind="kind_value", next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_network_endpoints(request) @@ -1533,29 +1722,50 @@ def test_list_network_endpoints_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListNetworkEndpointsPager) assert response.id == "id_value" - assert response.items == [ - compute.NetworkEndpointWithHealthStatus( - healths=[ - compute.HealthStatusForNetworkEndpoint( - backend_service=compute.BackendServiceReference( - backend_service="backend_service_value" - ) - ) - ] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_network_endpoints_rest_bad_request( + transport: str = "rest", + request_type=compute.ListNetworkEndpointsNetworkEndpointGroupsRequest, +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + request_init[ + "network_endpoint_groups_list_endpoints_request_resource" + ] = compute.NetworkEndpointGroupsListEndpointsRequest( + health_status=compute.NetworkEndpointGroupsListEndpointsRequest.HealthStatus.SHOW + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_network_endpoints(request) def test_list_network_endpoints_rest_from_dict(): test_list_network_endpoints_rest(request_type=dict) -def test_list_network_endpoints_rest_flattened(): +def test_list_network_endpoints_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1564,45 +1774,48 @@ def test_list_network_endpoints_rest_flattened(): return_value = compute.NetworkEndpointGroupsListNetworkEndpoints() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworkEndpointGroupsListNetworkEndpoints.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - network_endpoint_groups_list_endpoints_request_resource = compute.NetworkEndpointGroupsListEndpointsRequest( - health_status=compute.NetworkEndpointGroupsListEndpointsRequest.HealthStatus.SHOW - ) - client.list_network_endpoints( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", network_endpoint_group="network_endpoint_group_value", - network_endpoint_groups_list_endpoints_request_resource=network_endpoint_groups_list_endpoints_request_resource, + network_endpoint_groups_list_endpoints_request_resource=compute.NetworkEndpointGroupsListEndpointsRequest( + health_status=compute.NetworkEndpointGroupsListEndpointsRequest.HealthStatus.SHOW + ), ) + mock_args.update(sample_request) + client.list_network_endpoints(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) - assert compute.NetworkEndpointGroupsListEndpointsRequest.to_json( - network_endpoint_groups_list_endpoints_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_list_network_endpoints_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{network_endpoint_group}/listNetworkEndpoints" + % client.transport._host, + args[1], + ) + + +def test_list_network_endpoints_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1619,13 +1832,15 @@ def test_list_network_endpoints_rest_flattened_error(): ) -def test_list_network_endpoints_pager(): +def test_list_network_endpoints_rest_pager(): client = NetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NetworkEndpointGroupsListNetworkEndpoints( @@ -1664,10 +1879,18 @@ def test_list_network_endpoints_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_network_endpoints(request={}) + sample_request = { + "project": "sample1", + "zone": "sample2", + "network_endpoint_group": "sample3", + } + sample_request[ + "network_endpoint_groups_list_endpoints_request_resource" + ] = compute.NetworkEndpointGroupsListEndpointsRequest( + health_status=compute.NetworkEndpointGroupsListEndpointsRequest.HealthStatus.SHOW + ) - assert pager._metadata == metadata + pager = client.list_network_endpoints(request=sample_request) results = list(pager) assert len(results) == 6 @@ -1675,7 +1898,7 @@ def test_list_network_endpoints_pager(): isinstance(i, compute.NetworkEndpointWithHealthStatus) for i in results ) - pages = list(client.list_network_endpoints(request={}).pages) + pages = list(client.list_network_endpoints(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1688,9 +1911,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1700,9 +1926,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1712,13 +1938,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsNetworkEndpointGroupRequest, +): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1727,43 +1980,46 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/networkEndpointGroups/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): client = NetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1866,8 +2122,10 @@ def test_network_endpoint_groups_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_network_endpoint_groups_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1891,29 +2149,6 @@ def test_network_endpoint_groups_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_network_endpoint_groups_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.network_endpoint_groups.transports.NetworkEndpointGroupsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NetworkEndpointGroupsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_network_endpoint_groups_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1925,7 +2160,6 @@ def test_network_endpoint_groups_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_network_endpoint_groups_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1941,21 +2175,6 @@ def test_network_endpoint_groups_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_network_endpoint_groups_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NetworkEndpointGroupsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_network_endpoint_groups_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2102,3 +2321,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = NetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_networks.py b/tests/unit/gapic/compute_v1/test_networks.py index 385c7c697..550adbd4f 100644 --- a/tests/unit/gapic/compute_v1/test_networks.py +++ b/tests/unit/gapic/compute_v1/test_networks.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.networks import NetworksClient from google.cloud.compute_v1.services.networks import pagers from google.cloud.compute_v1.services.networks import transports -from google.cloud.compute_v1.services.networks.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,7 +169,7 @@ def test_networks_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,6 +178,7 @@ def test_networks_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -201,7 +186,7 @@ def test_networks_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -210,6 +195,7 @@ def test_networks_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -217,7 +203,7 @@ def test_networks_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -226,6 +212,7 @@ def test_networks_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,7 +232,7 @@ def test_networks_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,6 +241,7 @@ def test_networks_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -284,7 +272,7 @@ def test_networks_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as 
patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -301,6 +289,7 @@ def test_networks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -325,7 +314,7 @@ def test_networks_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -334,6 +323,7 @@ def test_networks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -346,7 +336,7 @@ def test_networks_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +345,7 @@ def test_networks_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -369,7 +360,7 @@ def test_networks_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +369,7 @@ def test_networks_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +384,7 @@ def test_networks_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -401,6 +393,7 @@ def test_networks_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -411,9 +404,12 @@ def test_add_peering_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init[ + "networks_add_peering_request_resource" + ] = compute.NetworksAddPeeringRequest(auto_create_routes=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
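Before the mocked call, note why request_type(request_init) is enough to "satisfy transcoding": the proto-plus request classes accept a plain mapping, and the populated path fields are exactly what the REST transport later substitutes into the URI template. A minimal sketch, assuming only the compute_v1 types already used above:

from google.cloud.compute_v1.types import compute

request_init = {"project": "sample1", "network": "sample2"}
request_init["networks_add_peering_request_resource"] = compute.NetworksAddPeeringRequest(
    auto_create_routes=True
)
request = compute.AddPeeringNetworkRequest(request_init)

assert request.project == "sample1"    # fills {project} in the URI template
assert request.network == "sample2"    # fills {network}
assert request.networks_add_peering_request_resource.auto_create_routes  # sent as the JSON body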
with mock.patch.object(Session, "request") as req: @@ -423,7 +419,6 @@ def test_add_peering_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -441,14 +436,13 @@ def test_add_peering_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_peering(request) @@ -459,7 +453,6 @@ def test_add_peering_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -477,18 +470,43 @@ def test_add_peering_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_peering_rest_bad_request( + transport: str = "rest", request_type=compute.AddPeeringNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init[ + "networks_add_peering_request_resource" + ] = compute.NetworksAddPeeringRequest(auto_create_routes=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_peering(request) + + def test_add_peering_rest_from_dict(): test_add_peering_rest(request_type=dict) -def test_add_peering_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_peering_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
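The fake response is assembled the same way throughout these tests: serialize the expected proto-plus message to JSON and plant it as the body of a requests.Response for the transport to parse. A rough self-contained sketch, assuming the usual proto-plus to_json/from_json round trip; the Operation field values are illustrative:

from requests import Response
from google.cloud.compute_v1.types import compute

return_value = compute.Operation(description="description_value", id=205)
response_value = Response()
response_value.status_code = 200
response_value._content = compute.Operation.to_json(return_value).encode("UTF-8")

# The REST transport would now read this Response and rebuild the message:
parsed = compute.Operation.from_json(response_value.content.decode("UTF-8"))
assert parsed.description == "description_value"
assert parsed.id == 205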
with mock.patch.object(Session, "request") as req: @@ -496,40 +514,42 @@ def test_add_peering_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - networks_add_peering_request_resource = compute.NetworksAddPeeringRequest( - auto_create_routes=True - ) - client.add_peering( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network="network_value", - networks_add_peering_request_resource=networks_add_peering_request_resource, + networks_add_peering_request_resource=compute.NetworksAddPeeringRequest( + auto_create_routes=True + ), ) + mock_args.update(sample_request) + client.add_peering(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) - assert compute.NetworksAddPeeringRequest.to_json( - networks_add_peering_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_peering_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}/addPeering" + % client.transport._host, + args[1], + ) + + +def test_add_peering_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -551,9 +571,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -563,7 +583,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -581,14 +600,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -599,7 +617,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -617,18 +634,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -636,30 +675,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", network="network_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", network="network_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -676,9 +721,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNetworkReques credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -693,18 +738,14 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNetworkReques kind="kind_value", mtu=342, name="name_value", - peerings=[compute.NetworkPeering(auto_create_routes=True)], - routing_config=compute.NetworkRoutingConfig( - routing_mode=compute.NetworkRoutingConfig.RoutingMode.GLOBAL - ), self_link="self_link_value", subnetworks=["subnetworks_value"], ) # Wrap the value into a proper Response obj - json_return_value = compute.Network.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Network.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -720,20 +761,41 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNetworkReques assert response.kind == "kind_value" assert response.mtu == 342 assert response.name == "name_value" - assert response.peerings == [compute.NetworkPeering(auto_create_routes=True)] - assert response.routing_config == compute.NetworkRoutingConfig( - routing_mode=compute.NetworkRoutingConfig.RoutingMode.GLOBAL - ) assert response.self_link == "self_link_value" assert response.subnetworks == ["subnetworks_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -741,30 +803,36 @@ def test_get_rest_flattened(): return_value = compute.Network() # Wrap the value into a proper Response obj - json_return_value = compute.Network.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Network.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", network="network_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", network="network_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -783,54 +851,60 @@ def test_get_effective_firewalls_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.NetworksGetEffectiveFirewallsResponse( - firewall_policys=[ - compute.NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy( - display_name="display_name_value" - ) - ], - firewalls=[ - compute.Firewall( - allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")] - ) - ], - ) + return_value = compute.NetworksGetEffectiveFirewallsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworksGetEffectiveFirewallsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_effective_firewalls(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.NetworksGetEffectiveFirewallsResponse) - assert response.firewall_policys == [ - compute.NetworksGetEffectiveFirewallsResponseEffectiveFirewallPolicy( - display_name="display_name_value" - ) - ] - assert response.firewalls == [ - compute.Firewall(allowed=[compute.Allowed(I_p_protocol="I_p_protocol_value")]) - ] + + +def test_get_effective_firewalls_rest_bad_request( + transport: str = "rest", request_type=compute.GetEffectiveFirewallsNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
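The only thing these *_rest_bad_request tests need is a requests.Response with a 4xx status and an attached Request, because google-api-core maps non-2xx HTTP responses onto its exception classes (400 becomes BadRequest). A small sketch of that mapping, assuming the public from_http_response helper:

from requests import Request, Response
from google.api_core import exceptions as core_exceptions

response_value = Response()
response_value.status_code = 400
response_value.request = Request()  # from_http_response reads the method/url off this

exc = core_exceptions.from_http_response(response_value)
assert isinstance(exc, core_exceptions.BadRequest)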
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_effective_firewalls(request) def test_get_effective_firewalls_rest_from_dict(): test_get_effective_firewalls_rest(request_type=dict) -def test_get_effective_firewalls_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_effective_firewalls_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -838,32 +912,38 @@ def test_get_effective_firewalls_rest_flattened(): return_value = compute.NetworksGetEffectiveFirewallsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.NetworksGetEffectiveFirewallsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_effective_firewalls( - project="project_value", network="network_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", network="network_value",) + mock_args.update(sample_request) + client.get_effective_firewalls(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}/getEffectiveFirewalls" + % client.transport._host, + args[1], + ) -def test_get_effective_firewalls_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_effective_firewalls_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -882,9 +962,10 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["network_resource"] = compute.Network(I_pv4_range="I_pv4_range_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -894,7 +975,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -912,14 +992,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -930,7 +1009,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -948,18 +1026,41 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["network_resource"] = compute.Network(I_pv4_range="I_pv4_range_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -967,35 +1068,39 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- network_resource = compute.Network(I_pv4_range="I_pv4_range_value") - client.insert( - project="project_value", network_resource=network_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + network_resource=compute.Network(I_pv4_range="I_pv4_range_value"), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.Network.to_json( - network_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1012,26 +1117,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListNetworksReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NetworkList( id="id_value", - items=[compute.Network(I_pv4_range="I_pv4_range_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NetworkList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1039,19 +1142,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListNetworksReq # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.Network(I_pv4_range="I_pv4_range_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListNetworksRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1059,27 +1185,36 @@ def test_list_rest_flattened(): return_value = compute.NetworkList() # Wrap the value into a proper Response obj - json_return_value = compute.NetworkList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1089,11 +1224,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NetworkList( @@ -1115,16 +1252,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Network) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1136,26 +1272,24 @@ def test_list_peering_routes_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ExchangedPeeringRoutesList( id="id_value", - items=[compute.ExchangedPeeringRoute(dest_range="dest_range_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ExchangedPeeringRoutesList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ExchangedPeeringRoutesList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_peering_routes(request) @@ -1163,21 +1297,42 @@ def test_list_peering_routes_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPeeringRoutesPager) assert response.id == "id_value" - assert response.items == [ - compute.ExchangedPeeringRoute(dest_range="dest_range_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_peering_routes_rest_bad_request( + transport: str = "rest", request_type=compute.ListPeeringRoutesNetworksRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_peering_routes(request) def test_list_peering_routes_rest_from_dict(): test_list_peering_routes_rest(request_type=dict) -def test_list_peering_routes_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_peering_routes_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1185,30 +1340,36 @@ def test_list_peering_routes_rest_flattened(): return_value = compute.ExchangedPeeringRoutesList() # Wrap the value into a proper Response obj - json_return_value = compute.ExchangedPeeringRoutesList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ExchangedPeeringRoutesList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_peering_routes( - project="project_value", network="network_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", network="network_value",) + mock_args.update(sample_request) + client.list_peering_routes(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}/listPeeringRoutes" + % client.transport._host, + args[1], + ) -def test_list_peering_routes_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_peering_routes_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1220,11 +1381,13 @@ def test_list_peering_routes_rest_flattened_error(): ) -def test_list_peering_routes_pager(): +def test_list_peering_routes_rest_pager(): client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ExchangedPeeringRoutesList( @@ -1259,16 +1422,15 @@ def test_list_peering_routes_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_peering_routes(request={}) + sample_request = {"project": "sample1", "network": "sample2"} - assert pager._metadata == metadata + pager = client.list_peering_routes(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ExchangedPeeringRoute) for i in results) - pages = list(client.list_peering_routes(request={}).pages) + pages = list(client.list_peering_routes(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1278,9 +1440,10 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchNetworkRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init["network_resource"] = compute.Network(I_pv4_range="I_pv4_range_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1290,7 +1453,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchNetworkRe creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1308,14 +1470,13 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchNetworkRe target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1326,7 +1487,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchNetworkRe assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1344,18 +1504,41 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchNetworkRe assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchNetworkRequest +): + client = NetworksClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init["network_resource"] = compute.Network(I_pv4_range="I_pv4_range_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1363,38 +1546,40 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - network_resource = compute.Network(I_pv4_range="I_pv4_range_value") - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network="network_value", - network_resource=network_resource, + network_resource=compute.Network(I_pv4_range="I_pv4_range_value"), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) - assert compute.Network.to_json( - network_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
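Concretely, the generated clients reject a call that mixes a request object with flattened keyword arguments before any HTTP traffic happens, which is why the *_flattened_error tests need no mocking at all. A brief sketch against the Networks client, using only field names that appear in these tests:

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.networks import NetworksClient
from google.cloud.compute_v1.types import compute

client = NetworksClient(credentials=ga_credentials.AnonymousCredentials())
try:
    client.patch(
        compute.PatchNetworkRequest(),
        project="project_value",
        network="network_value",
        network_resource=compute.Network(I_pv4_range="I_pv4_range_value"),
    )
except ValueError:
    pass  # mixing a request object with flattened fields is rejected client-side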
@@ -1414,9 +1599,12 @@ def test_remove_peering_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init[ + "networks_remove_peering_request_resource" + ] = compute.NetworksRemovePeeringRequest(name="name_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1426,7 +1614,6 @@ def test_remove_peering_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1444,14 +1631,13 @@ def test_remove_peering_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_peering(request) @@ -1462,7 +1648,6 @@ def test_remove_peering_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1480,18 +1665,43 @@ def test_remove_peering_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_peering_rest_bad_request( + transport: str = "rest", request_type=compute.RemovePeeringNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init[ + "networks_remove_peering_request_resource" + ] = compute.NetworksRemovePeeringRequest(name="name_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_peering(request) + + def test_remove_peering_rest_from_dict(): test_remove_peering_rest(request_type=dict) -def test_remove_peering_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_peering_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1499,40 +1709,42 @@ def test_remove_peering_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - networks_remove_peering_request_resource = compute.NetworksRemovePeeringRequest( - name="name_value" - ) - client.remove_peering( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network="network_value", - networks_remove_peering_request_resource=networks_remove_peering_request_resource, + networks_remove_peering_request_resource=compute.NetworksRemovePeeringRequest( + name="name_value" + ), ) + mock_args.update(sample_request) + client.remove_peering(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) - assert compute.NetworksRemovePeeringRequest.to_json( - networks_remove_peering_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_remove_peering_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}/removePeering" + % client.transport._host, + args[1], + ) + + +def test_remove_peering_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1554,9 +1766,9 @@ def test_switch_to_custom_mode_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1566,7 +1778,6 @@ def test_switch_to_custom_mode_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1584,14 +1795,13 @@ def test_switch_to_custom_mode_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.switch_to_custom_mode(request) @@ -1602,7 +1812,6 @@ def test_switch_to_custom_mode_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1620,18 +1829,40 @@ def test_switch_to_custom_mode_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_switch_to_custom_mode_rest_bad_request( + transport: str = "rest", request_type=compute.SwitchToCustomModeNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.switch_to_custom_mode(request) + + def test_switch_to_custom_mode_rest_from_dict(): test_switch_to_custom_mode_rest(request_type=dict) -def test_switch_to_custom_mode_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_switch_to_custom_mode_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1639,30 +1870,36 @@ def test_switch_to_custom_mode_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.switch_to_custom_mode( - project="project_value", network="network_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", network="network_value",) + mock_args.update(sample_request) + client.switch_to_custom_mode(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}/switchToCustomMode" + % client.transport._host, + args[1], + ) -def test_switch_to_custom_mode_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_switch_to_custom_mode_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1681,9 +1918,14 @@ def test_update_peering_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init[ + "networks_update_peering_request_resource" + ] = compute.NetworksUpdatePeeringRequest( + network_peering=compute.NetworkPeering(auto_create_routes=True) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1693,7 +1935,6 @@ def test_update_peering_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1711,14 +1952,13 @@ def test_update_peering_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_peering(request) @@ -1729,7 +1969,6 @@ def test_update_peering_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1747,18 +1986,45 @@ def test_update_peering_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_peering_rest_bad_request( + transport: str = "rest", request_type=compute.UpdatePeeringNetworkRequest +): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "network": "sample2"} + request_init[ + "networks_update_peering_request_resource" + ] = compute.NetworksUpdatePeeringRequest( + network_peering=compute.NetworkPeering(auto_create_routes=True) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_peering(request) + + def test_update_peering_rest_from_dict(): test_update_peering_rest(request_type=dict) -def test_update_peering_rest_flattened(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_peering_rest_flattened(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1766,40 +2032,42 @@ def test_update_peering_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - networks_update_peering_request_resource = compute.NetworksUpdatePeeringRequest( - network_peering=compute.NetworkPeering(auto_create_routes=True) - ) - client.update_peering( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "network": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", network="network_value", - networks_update_peering_request_resource=networks_update_peering_request_resource, + networks_update_peering_request_resource=compute.NetworksUpdatePeeringRequest( + network_peering=compute.NetworkPeering(auto_create_routes=True) + ), ) + mock_args.update(sample_request) + client.update_peering(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "network_value" in http_call[1] + str(body) + str(params) - assert compute.NetworksUpdatePeeringRequest.to_json( - networks_update_peering_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_peering_rest_flattened_error(): - client = NetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/networks/{network}/updatePeering" + % client.transport._host, + args[1], + ) + + +def test_update_peering_rest_flattened_error(transport: str = "rest"): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
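# Editor's sketch, not part of the generated patch: every *_rest_bad_request test
# added above follows the same shape, condensed here for reference. It assumes the
# test dependencies the suite already uses (mock, pytest, requests,
# google-cloud-compute); switch_to_custom_mode is just one representative method.
import mock
import pytest
from requests import Request, Response
from requests.sessions import Session

from google.api_core import exceptions as core_exceptions
from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.networks import NetworksClient
from google.cloud.compute_v1.types import compute


def sketch_rest_bad_request():
    client = NetworksClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest",
    )
    request = compute.SwitchToCustomModeNetworkRequest(
        {"project": "sample1", "network": "sample2"}
    )

    # Fake a 400 from the underlying session; the REST transport surfaces it
    # as a google.api_core BadRequest exception.
    with mock.patch.object(Session, "request") as req, pytest.raises(
        core_exceptions.BadRequest
    ):
        response_value = Response()
        response_value.status_code = 400
        response_value.request = Request()
        req.return_value = response_value
        client.switch_to_custom_mode(request)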
@@ -1900,8 +2168,10 @@ def test_networks_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_networks_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1925,29 +2195,6 @@ def test_networks_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_networks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.networks.transports.NetworksTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NetworksTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_networks_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1959,7 +2206,6 @@ def test_networks_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_networks_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1975,21 +2221,6 @@ def test_networks_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_networks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NetworksClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_networks_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2136,3 +2367,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = NetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_node_groups.py b/tests/unit/gapic/compute_v1/test_node_groups.py index 4e48c96dd..6026d2a33 100644 --- a/tests/unit/gapic/compute_v1/test_node_groups.py +++ b/tests/unit/gapic/compute_v1/test_node_groups.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.node_groups import NodeGroupsClient from google.cloud.compute_v1.services.node_groups import pagers from google.cloud.compute_v1.services.node_groups import transports -from google.cloud.compute_v1.services.node_groups.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_node_groups_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_node_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_node_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_node_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def 
test_node_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_node_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_node_groups_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_node_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_node_groups_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def test_node_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_node_groups_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_node_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
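# Editor's sketch, not part of the generated patch: the client_options tests above
# now pass the transport name explicitly instead of relying on the default. A
# minimal usage example of that construction; "rest" and the endpoint value are
# illustrative assumptions only.
from google.api_core import client_options
from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.node_groups import NodeGroupsClient

options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
client = NodeGroupsClient(
    credentials=ga_credentials.AnonymousCredentials(),
    transport="rest",
    client_options=options,
)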
@@ -350,7 +340,7 @@ def test_node_groups_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_node_groups_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_node_groups_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_node_groups_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_node_groups_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_node_groups_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,9 +408,12 @@ def test_add_nodes_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init[ + "node_groups_add_nodes_request_resource" + ] = compute.NodeGroupsAddNodesRequest(additional_node_count=2214) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -427,7 +423,6 @@ def test_add_nodes_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -445,14 +440,13 @@ def test_add_nodes_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_nodes(request) @@ -463,7 +457,6 @@ def test_add_nodes_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -481,18 +474,43 @@ def test_add_nodes_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_nodes_rest_bad_request( + transport: str = "rest", request_type=compute.AddNodesNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init[ + "node_groups_add_nodes_request_resource" + ] = compute.NodeGroupsAddNodesRequest(additional_node_count=2214) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_nodes(request) + + def test_add_nodes_rest_from_dict(): test_add_nodes_rest(request_type=dict) -def test_add_nodes_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_nodes_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -500,42 +518,47 @@ def test_add_nodes_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - node_groups_add_nodes_request_resource = compute.NodeGroupsAddNodesRequest( - additional_node_count=2214 - ) - client.add_nodes( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_group="node_group_value", - node_groups_add_nodes_request_resource=node_groups_add_nodes_request_resource, + node_groups_add_nodes_request_resource=compute.NodeGroupsAddNodesRequest( + additional_node_count=2214 + ), ) + mock_args.update(sample_request) + client.add_nodes(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_group_value" in http_call[1] + str(body) + str(params) - assert compute.NodeGroupsAddNodesRequest.to_json( - node_groups_add_nodes_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_nodes_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/addNodes" + % client.transport._host, + args[1], + ) + + +def test_add_nodes_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -558,37 +581,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NodeGroupAggregatedList( id="id_value", - items={ - "key_value": compute.NodeGroupsScopedList( - node_groups=[ - compute.NodeGroup( - autoscaling_policy=compute.NodeGroupAutoscalingPolicy( - max_nodes=958 - ) - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroupAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroupAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -596,28 +607,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.NodeGroupsScopedList( - node_groups=[ - compute.NodeGroup( - autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListNodeGroupsRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -625,27 +651,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.NodeGroupAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroupAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroupAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/nodeGroups" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -655,11 +690,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NodeGroupAggregatedList( @@ -692,10 +729,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.NodeGroupsScopedList) assert pager.get("h") is None @@ -710,7 +746,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.NodeGroupsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -722,9 +758,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -734,7 +770,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -752,14 +787,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -770,7 +804,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -788,18 +821,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -807,31 +862,42 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_group="node_group_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -851,9 +917,12 @@ def test_delete_nodes_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init[ + "node_groups_delete_nodes_request_resource" + ] = compute.NodeGroupsDeleteNodesRequest(nodes=["nodes_value"]) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -863,7 +932,6 @@ def test_delete_nodes_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -881,14 +949,13 @@ def test_delete_nodes_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_nodes(request) @@ -899,7 +966,6 @@ def test_delete_nodes_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -917,18 +983,43 @@ def test_delete_nodes_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_nodes_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteNodesNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init[ + "node_groups_delete_nodes_request_resource" + ] = compute.NodeGroupsDeleteNodesRequest(nodes=["nodes_value"]) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_nodes(request) + + def test_delete_nodes_rest_from_dict(): test_delete_nodes_rest(request_type=dict) -def test_delete_nodes_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_nodes_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -936,42 +1027,47 @@ def test_delete_nodes_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - node_groups_delete_nodes_request_resource = compute.NodeGroupsDeleteNodesRequest( - nodes=["nodes_value"] - ) - client.delete_nodes( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_group="node_group_value", - node_groups_delete_nodes_request_resource=node_groups_delete_nodes_request_resource, + node_groups_delete_nodes_request_resource=compute.NodeGroupsDeleteNodesRequest( + nodes=["nodes_value"] + ), ) + mock_args.update(sample_request) + client.delete_nodes(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_group_value" in http_call[1] + str(body) + str(params) - assert compute.NodeGroupsDeleteNodesRequest.to_json( - node_groups_delete_nodes_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_delete_nodes_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/deleteNodes" + % client.transport._host, + args[1], + ) + + +def test_delete_nodes_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -992,15 +1088,14 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeGroupRequ credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NodeGroup( - autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958), creation_timestamp="creation_timestamp_value", description="description_value", fingerprint="fingerprint_value", @@ -1008,9 +1103,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeGroupRequ kind="kind_value", location_hint="location_hint_value", maintenance_policy=compute.NodeGroup.MaintenancePolicy.DEFAULT, - maintenance_window=compute.NodeGroupMaintenanceWindow( - maintenance_duration=compute.Duration(nanos=543) - ), name="name_value", node_template="node_template_value", self_link="self_link_value", @@ -1020,18 +1112,15 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeGroupRequ ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroup.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.NodeGroup) - assert response.autoscaling_policy == compute.NodeGroupAutoscalingPolicy( - max_nodes=958 - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.fingerprint == "fingerprint_value" @@ -1039,9 +1128,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeGroupRequ assert response.kind == "kind_value" assert response.location_hint == "location_hint_value" assert response.maintenance_policy == compute.NodeGroup.MaintenancePolicy.DEFAULT - assert response.maintenance_window == compute.NodeGroupMaintenanceWindow( - maintenance_duration=compute.Duration(nanos=543) - ) assert response.name == "name_value" assert response.node_template == "node_template_value" assert response.self_link == "self_link_value" @@ -1050,12 +1136,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeGroupRequ assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1063,31 +1174,42 @@ def test_get_rest_flattened(): return_value = compute.NodeGroup() # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroup.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_group="node_group_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1107,60 +1229,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1168,31 +1291,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1212,9 +1346,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["node_group_resource"] = compute.NodeGroup( + autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1224,7 +1361,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1242,14 +1378,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1260,7 +1395,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1278,18 +1412,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", 
"zone": "sample2"} + request_init["node_group_resource"] = compute.NodeGroup( + autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1297,42 +1456,43 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - node_group_resource = compute.NodeGroup( - autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", initial_node_count=1911, - node_group_resource=node_group_resource, + node_group_resource=compute.NodeGroup( + autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert str(1911) in http_call[1] + str(body) + str(params) - assert compute.NodeGroup.to_json( - node_group_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1353,30 +1513,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListNodeGroupsR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NodeGroupList( id="id_value", - items=[ - compute.NodeGroup( - autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroupList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1384,23 +1538,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListNodeGroupsR # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.NodeGroup( - autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListNodeGroupsRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1408,30 +1581,36 @@ def test_list_rest_flattened(): return_value = compute.NodeGroupList() # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroupList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1441,11 +1620,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NodeGroupList( @@ -1467,16 +1648,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NodeGroup) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1488,30 +1668,24 @@ def test_list_nodes_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NodeGroupsListNodes( id="id_value", - items=[ - compute.NodeGroupNode( - accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroupsListNodes.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroupsListNodes.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_nodes(request) @@ -1519,23 +1693,42 @@ def test_list_nodes_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListNodesPager) assert response.id == "id_value" - assert response.items == [ - compute.NodeGroupNode( - accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_nodes_rest_bad_request( + transport: str = "rest", request_type=compute.ListNodesNodeGroupsRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_nodes(request) def test_list_nodes_rest_from_dict(): test_list_nodes_rest(request_type=dict) -def test_list_nodes_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_nodes_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1543,31 +1736,42 @@ def test_list_nodes_rest_flattened(): return_value = compute.NodeGroupsListNodes() # Wrap the value into a proper Response obj - json_return_value = compute.NodeGroupsListNodes.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeGroupsListNodes.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_nodes( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_group="node_group_value", ) + mock_args.update(sample_request) + client.list_nodes(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/listNodes" + % client.transport._host, + args[1], + ) -def test_list_nodes_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_nodes_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1580,11 +1784,13 @@ def test_list_nodes_rest_flattened_error(): ) -def test_list_nodes_pager(): +def test_list_nodes_rest_pager(): client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NodeGroupsListNodes( @@ -1614,16 +1820,19 @@ def test_list_nodes_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_nodes(request={}) + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } - assert pager._metadata == metadata + pager = client.list_nodes(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NodeGroupNode) for i in results) - pages = list(client.list_nodes(request={}).pages) + pages = list(client.list_nodes(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1635,9 +1844,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init["node_group_resource"] = compute.NodeGroup( + autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1647,7 +1859,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1665,14 +1876,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1683,7 +1893,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1701,18 +1910,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init["node_group_resource"] = compute.NodeGroup( + autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1720,42 +1954,47 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- node_group_resource = compute.NodeGroup( - autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_group="node_group_value", - node_group_resource=node_group_resource, + node_group_resource=compute.NodeGroup( + autoscaling_policy=compute.NodeGroupAutoscalingPolicy(max_nodes=958) + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_group_value" in http_call[1] + str(body) + str(params) - assert compute.NodeGroup.to_json( - node_group_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1778,60 +2017,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1839,42 +2085,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - zone_set_policy_request_resource = compute.ZoneSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - zone_set_policy_request_resource=zone_set_policy_request_resource, + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.ZoneSetPolicyRequest.to_json( - zone_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1897,9 +2148,12 @@ def test_set_node_template_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init[ + "node_groups_set_node_template_request_resource" + ] = compute.NodeGroupsSetNodeTemplateRequest(node_template="node_template_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1909,7 +2163,6 @@ def test_set_node_template_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1927,14 +2180,13 @@ def test_set_node_template_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_node_template(request) @@ -1945,7 +2197,6 @@ def test_set_node_template_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1963,18 +2214,43 @@ def test_set_node_template_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_node_template_rest_bad_request( + transport: str = "rest", request_type=compute.SetNodeTemplateNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_group": "sample3"} + request_init[ + "node_groups_set_node_template_request_resource" + ] = compute.NodeGroupsSetNodeTemplateRequest(node_template="node_template_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_node_template(request) + + def test_set_node_template_rest_from_dict(): test_set_node_template_rest(request_type=dict) -def test_set_node_template_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_node_template_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1982,42 +2258,47 @@ def test_set_node_template_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - node_groups_set_node_template_request_resource = compute.NodeGroupsSetNodeTemplateRequest( - node_template="node_template_value" - ) - client.set_node_template( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_group="node_group_value", - node_groups_set_node_template_request_resource=node_groups_set_node_template_request_resource, + node_groups_set_node_template_request_resource=compute.NodeGroupsSetNodeTemplateRequest( + node_template="node_template_value" + ), ) + mock_args.update(sample_request) + client.set_node_template(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_group_value" in http_call[1] + str(body) + str(params) - assert compute.NodeGroupsSetNodeTemplateRequest.to_json( - node_groups_set_node_template_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_node_template_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{node_group}/setNodeTemplate" + % client.transport._host, + args[1], + ) + + +def test_set_node_template_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2040,9 +2321,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2052,9 +2336,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -2064,12 +2348,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsNodeGroupRequest +): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2077,42 +2389,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = NodeGroupsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeGroups/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2216,8 +2533,10 @@ def test_node_groups_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_node_groups_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2241,29 +2560,6 @@ def test_node_groups_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_node_groups_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.node_groups.transports.NodeGroupsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NodeGroupsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_node_groups_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2275,7 +2571,6 @@ def test_node_groups_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_node_groups_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2291,21 +2586,6 @@ def test_node_groups_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_node_groups_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NodeGroupsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_node_groups_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2452,3 +2732,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = NodeGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_node_templates.py b/tests/unit/gapic/compute_v1/test_node_templates.py index b71e569f3..debe7c4ea 100644 --- a/tests/unit/gapic/compute_v1/test_node_templates.py +++ b/tests/unit/gapic/compute_v1/test_node_templates.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.node_templates import NodeTemplatesClient from google.cloud.compute_v1.services.node_templates import pagers from google.cloud.compute_v1.services.node_templates import transports -from google.cloud.compute_v1.services.node_templates.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -194,7 +178,7 @@ def test_node_templates_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -203,6 +187,7 @@ def test_node_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -210,7 +195,7 @@ def test_node_templates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +204,7 @@ def test_node_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +212,7 @@ def test_node_templates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +221,7 @@ def test_node_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -254,7 +241,7 @@ def test_node_templates_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -263,6 +250,7 @@ def test_node_templates_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -295,7 +283,7 @@ def test_node_templates_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -312,6 +300,7 @@ def 
test_node_templates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -336,7 +325,7 @@ def test_node_templates_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -345,6 +334,7 @@ def test_node_templates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -357,7 +347,7 @@ def test_node_templates_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -366,6 +356,7 @@ def test_node_templates_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -380,7 +371,7 @@ def test_node_templates_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -389,6 +380,7 @@ def test_node_templates_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -403,7 +395,7 @@ def test_node_templates_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -412,6 +404,7 @@ def test_node_templates_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -422,37 +415,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NodeTemplateAggregatedList( id="id_value", - items={ - "key_value": compute.NodeTemplatesScopedList( - node_templates=[ - compute.NodeTemplate( - accelerators=[ - compute.AcceleratorConfig(accelerator_count=1805) - ] - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeTemplateAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTemplateAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -460,28 +441,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.NodeTemplatesScopedList( - node_templates=[ - compute.NodeTemplate( - accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListNodeTemplatesRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -489,27 +485,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.NodeTemplateAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.NodeTemplateAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTemplateAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/nodeTemplates" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -519,11 +524,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NodeTemplateAggregatedList( @@ -558,10 +565,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.NodeTemplatesScopedList) assert pager.get("h") is None @@ -579,7 +585,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.NodeTemplatesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -591,9 +597,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "node_template": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -603,7 +613,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -621,14 +630,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -639,7 +647,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -657,18 +664,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteNodeTemplateRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "node_template": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -676,33 +709,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "node_template": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", node_template="node_template_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "node_template_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -720,77 +764,92 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeTemplateR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "node_template": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NodeTemplate( - accelerators=[compute.AcceleratorConfig(accelerator_count=1805)], cpu_overcommit_type=compute.NodeTemplate.CpuOvercommitType.CPU_OVERCOMMIT_TYPE_UNSPECIFIED, creation_timestamp="creation_timestamp_value", description="description_value", - disks=[compute.LocalDisk(disk_count=1075)], id=205, kind="kind_value", name="name_value", - node_affinity_labels={"key_value": "value_value"}, node_type="node_type_value", - node_type_flexibility=compute.NodeTemplateNodeTypeFlexibility( - cpus="cpus_value" - ), region="region_value", self_link="self_link_value", - server_binding=compute.ServerBinding( - type_=compute.ServerBinding.Type.RESTART_NODE_ON_ANY_SERVER - ), status=compute.NodeTemplate.Status.CREATING, status_message="status_message_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeTemplate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTemplate.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.NodeTemplate) - assert response.accelerators == [compute.AcceleratorConfig(accelerator_count=1805)] assert ( response.cpu_overcommit_type == compute.NodeTemplate.CpuOvercommitType.CPU_OVERCOMMIT_TYPE_UNSPECIFIED ) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.disks == [compute.LocalDisk(disk_count=1075)] assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.node_affinity_labels == {"key_value": "value_value"} assert response.node_type == "node_type_value" - assert response.node_type_flexibility == compute.NodeTemplateNodeTypeFlexibility( - cpus="cpus_value" - ) assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.server_binding == compute.ServerBinding( - type_=compute.ServerBinding.Type.RESTART_NODE_ON_ANY_SERVER - ) assert response.status == compute.NodeTemplate.Status.CREATING assert response.status_message == "status_message_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetNodeTemplateRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "node_template": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -798,33 +857,44 @@ def test_get_rest_flattened(): return_value = compute.NodeTemplate() # Wrap the value into a proper Response obj - json_return_value = compute.NodeTemplate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTemplate.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "node_template": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", node_template="node_template_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "node_template_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{node_template}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -844,60 +914,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyNodeTemplateRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -905,31 +976,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -949,9 +1031,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["node_template_resource"] = compute.NodeTemplate( + accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -961,7 +1046,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -979,14 +1063,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -997,7 +1080,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1015,18 +1097,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertNodeTemplateRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["node_template_resource"] = compute.NodeTemplate( + accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1034,40 +1141,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - node_template_resource = compute.NodeTemplate( - accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - node_template_resource=node_template_resource, + node_template_resource=compute.NodeTemplate( + accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.NodeTemplate.to_json( - node_template_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1089,30 +1198,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NodeTemplateList( id="id_value", - items=[ - compute.NodeTemplate( - accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeTemplateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTemplateList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1120,23 +1223,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.NodeTemplate( - accelerators=[compute.AcceleratorConfig(accelerator_count=1805)] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListNodeTemplatesRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1144,30 +1266,36 @@ def test_list_rest_flattened(): return_value = compute.NodeTemplateList() # Wrap the value into a proper Response obj - json_return_value = compute.NodeTemplateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTemplateList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1179,11 +1307,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NodeTemplateList( @@ -1213,16 +1343,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NodeTemplate) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1234,60 +1363,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyNodeTemplateRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1295,42 +1431,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- region_set_policy_request_resource = compute.RegionSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_policy_request_resource=region_set_policy_request_resource, + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetPolicyRequest.to_json( - region_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1353,9 +1494,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1365,9 +1509,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1377,12 +1521,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsNodeTemplateRequest +): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1390,42 +1562,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = NodeTemplatesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/nodeTemplates/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1524,8 +1701,10 @@ def test_node_templates_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_node_templates_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1549,29 +1728,6 @@ def test_node_templates_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_node_templates_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.node_templates.transports.NodeTemplatesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NodeTemplatesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_node_templates_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1583,7 +1739,6 @@ def test_node_templates_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_node_templates_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1599,21 +1754,6 @@ def test_node_templates_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_node_templates_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NodeTemplatesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_node_templates_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1760,3 +1900,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = NodeTemplatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_node_types.py b/tests/unit/gapic/compute_v1/test_node_types.py index 50305d33f..b06b16d02 100644 --- a/tests/unit/gapic/compute_v1/test_node_types.py +++ b/tests/unit/gapic/compute_v1/test_node_types.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.node_types import NodeTypesClient from google.cloud.compute_v1.services.node_types import pagers from google.cloud.compute_v1.services.node_types import transports -from google.cloud.compute_v1.services.node_types.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -187,7 +171,7 @@ def test_node_types_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -196,6 +180,7 @@ def test_node_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -203,7 +188,7 @@ def test_node_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -212,6 +197,7 @@ def test_node_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -219,7 +205,7 @@ def test_node_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -228,6 +214,7 @@ def test_node_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -247,7 +234,7 @@ def test_node_types_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -256,6 +243,7 @@ def test_node_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -286,7 +274,7 @@ def test_node_types_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -303,6 +291,7 @@ def test_node_types_client_mtls_env_auto( 
client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -327,7 +316,7 @@ def test_node_types_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -336,6 +325,7 @@ def test_node_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -348,7 +338,7 @@ def test_node_types_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +347,7 @@ def test_node_types_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -371,7 +362,7 @@ def test_node_types_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -380,6 +371,7 @@ def test_node_types_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -394,7 +386,7 @@ def test_node_types_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -403,6 +395,7 @@ def test_node_types_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -413,31 +406,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.NodeTypeAggregatedList( id="id_value", - items={ - "key_value": compute.NodeTypesScopedList( - node_types=[compute.NodeType(cpu_platform="cpu_platform_value")] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTypeAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -445,24 +432,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.NodeTypesScopedList( - node_types=[compute.NodeType(cpu_platform="cpu_platform_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListNodeTypesRequest +): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -470,27 +476,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.NodeTypeAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.NodeTypeAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTypeAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
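A note on the fake-response plumbing used in the REST tests above: the mocked Session.request returns a requests.Response whose body is a proto-plus message serialized with to_json(). A minimal sketch of that round trip (values borrowed from the test; this is illustration, not part of the generated patch):

    from requests import Response
    from google.cloud.compute_v1.types import compute

    # Serialize a proto-plus message to JSON and attach the encoded bytes to a
    # bare requests.Response, exactly as the mocks above do.
    return_value = compute.NodeType(cpu_platform="cpu_platform_value", guest_cpus=1090)
    json_return_value = compute.NodeType.to_json(return_value)
    response_value = Response()
    response_value.status_code = 200
    response_value._content = json_return_value.encode("UTF-8")

    # from_json() is the inverse the client relies on when it parses the body.
    assert compute.NodeType.from_json(json_return_value) == return_value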
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/nodeTypes" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -500,11 +515,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NodeTypeAggregatedList( @@ -537,10 +554,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.NodeTypesScopedList) assert pager.get("h") is None @@ -555,7 +571,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.NodeTypesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -565,9 +581,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeTypeReque credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_type": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
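For context on the new assertion style above: instead of grepping the request body and params, the flattened-call tests now check the transcoded URL against a URI template with google.api_core.path_template.validate. A short sketch of how validate behaves (the host and values below are illustrative, not taken from the patch):

    from google.api_core import path_template

    # validate(template, path) is True when the path matches the template, with
    # each {variable} standing in for a single path segment.
    assert path_template.validate(
        "https://compute.googleapis.com/compute/v1/projects/{project}/aggregated/nodeTypes",
        "https://compute.googleapis.com/compute/v1/projects/sample1/aggregated/nodeTypes",
    )
    assert not path_template.validate(
        "https://compute.googleapis.com/compute/v1/projects/{project}/aggregated/nodeTypes",
        "https://compute.googleapis.com/compute/v1/projects/sample1/zones/sample2/nodeTypes",
    )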
with mock.patch.object(Session, "request") as req: @@ -575,7 +591,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeTypeReque return_value = compute.NodeType( cpu_platform="cpu_platform_value", creation_timestamp="creation_timestamp_value", - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", guest_cpus=1090, id=205, @@ -588,9 +603,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeTypeReque ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeType.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -599,7 +614,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeTypeReque assert isinstance(response, compute.NodeType) assert response.cpu_platform == "cpu_platform_value" assert response.creation_timestamp == "creation_timestamp_value" - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.guest_cpus == 1090 assert response.id == 205 @@ -611,12 +625,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetNodeTypeReque assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetNodeTypeRequest +): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "node_type": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -624,31 +663,42 @@ def test_get_rest_flattened(): return_value = compute.NodeType() # Wrap the value into a proper Response obj - json_return_value = compute.NodeType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeType.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
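On the request_init pattern used throughout these tests: proto-plus request classes accept a plain mapping as their first positional argument, so request_type(request_init) builds the same message as spelling the fields out as keyword arguments. A quick sketch (field values mirror the test above):

    from google.cloud.compute_v1.types import compute

    # Constructing from a dict and from keyword arguments yields equal messages.
    from_dict = compute.GetNodeTypeRequest(
        {"project": "sample1", "zone": "sample2", "node_type": "sample3"}
    )
    from_kwargs = compute.GetNodeTypeRequest(
        project="sample1", zone="sample2", node_type="sample3"
    )
    assert from_dict == from_kwargs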
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "node_type": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", node_type="node_type_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "node_type_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeTypes/{node_type}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -666,26 +716,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListNodeTypesRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NodeTypeList( id="id_value", - items=[compute.NodeType(cpu_platform="cpu_platform_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NodeTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTypeList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -693,19 +741,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListNodeTypesRe # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.NodeType(cpu_platform="cpu_platform_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListNodeTypesRequest +): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -713,30 +784,36 @@ def test_list_rest_flattened(): return_value = compute.NodeTypeList() # Wrap the value into a proper Response obj - json_return_value = compute.NodeTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NodeTypeList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/nodeTypes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
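Background on the *_rest_bad_request tests above: google.api_core translates non-2xx HTTP responses into its exception hierarchy, which is why a mocked 400 surfaces as core_exceptions.BadRequest. A hedged sketch of that mapping in isolation (assuming from_http_response is the conversion point, as in google-api-core's HTTP error handling):

    from requests import Request, Response
    from google.api_core import exceptions as core_exceptions

    # A 400 response maps to BadRequest. The Request object is needed because
    # the generated error message embeds the request method and URL.
    response = Response()
    response.status_code = 400
    response.request = Request()
    exc = core_exceptions.from_http_response(response)
    assert isinstance(exc, core_exceptions.BadRequest)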
@@ -746,11 +823,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = NodeTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NodeTypeList( @@ -772,16 +851,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NodeType) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -864,8 +942,10 @@ def test_node_types_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_node_types_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -890,30 +970,6 @@ def test_node_types_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_node_types_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.node_types.transports.NodeTypesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.NodeTypesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_node_types_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -925,7 +981,6 @@ def test_node_types_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_node_types_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -942,22 +997,6 @@ def test_node_types_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_node_types_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - NodeTypesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_node_types_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1104,3 +1143,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = NodeTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_packet_mirrorings.py b/tests/unit/gapic/compute_v1/test_packet_mirrorings.py index 41e9c1012..10dca7039 100644 --- a/tests/unit/gapic/compute_v1/test_packet_mirrorings.py +++ b/tests/unit/gapic/compute_v1/test_packet_mirrorings.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.packet_mirrorings import PacketMirroringsClient from google.cloud.compute_v1.services.packet_mirrorings import pagers from google.cloud.compute_v1.services.packet_mirrorings import transports -from google.cloud.compute_v1.services.packet_mirrorings.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
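The new test_transport_close and test_client_ctx above exercise a behavior worth calling out: the clients now work as context managers, and leaving the with-block closes the underlying transport (for REST, its requests Session). A brief usage sketch (anonymous credentials as in the tests; illustrative only):

    from google.auth import credentials as ga_credentials
    from google.cloud.compute_v1.services.node_types import NodeTypesClient

    # Exiting the with-block calls client.transport.close(), so no explicit
    # cleanup is required afterwards.
    with NodeTypesClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    ) as client:
        pass  # make API calls with `client` here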
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_packet_mirrorings_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_packet_mirrorings_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_packet_mirrorings_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_packet_mirrorings_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_packet_mirrorings_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_packet_mirrorings_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_packet_mirrorings_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_packet_mirrorings_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_packet_mirrorings_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 
@@ def test_packet_mirrorings_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_packet_mirrorings_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_packet_mirrorings_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -369,7 +359,7 @@ def test_packet_mirrorings_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_packet_mirrorings_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_packet_mirrorings_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_packet_mirrorings_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_packet_mirrorings_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_packet_mirrorings_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,37 +427,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.PacketMirroringAggregatedList( id="id_value", - items={ - "key_value": compute.PacketMirroringsScopedList( - packet_mirrorings=[ - compute.PacketMirroring( - collector_ilb=compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ) - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.PacketMirroringAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PacketMirroringAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -472,30 +453,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.PacketMirroringsScopedList( - packet_mirrorings=[ - compute.PacketMirroring( - collector_ilb=compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ) - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListPacketMirroringsRequest +): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -503,27 +497,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.PacketMirroringAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.PacketMirroringAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PacketMirroringAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/packetMirrorings" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -533,11 +536,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.PacketMirroringAggregatedList( @@ -573,10 +578,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.PacketMirroringsScopedList) assert pager.get("h") is None @@ -594,7 +598,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.PacketMirroringsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -606,9 +610,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -618,7 +626,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -636,14 +643,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -654,7 +660,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -672,18 +677,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeletePacketMirroringRequest +): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -691,33 +722,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", packet_mirroring="packet_mirroring_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "packet_mirroring_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -737,82 +779,85 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PacketMirroring( - collector_ilb=compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ), creation_timestamp="creation_timestamp_value", description="description_value", enable=compute.PacketMirroring.Enable.FALSE, - filter=compute.PacketMirroringFilter(I_p_protocols=["I_p_protocols_value"]), id=205, kind="kind_value", - mirrored_resources=compute.PacketMirroringMirroredResourceInfo( - instances=[ - compute.PacketMirroringMirroredResourceInfoInstanceInfo( - canonical_url="canonical_url_value" - ) - ] - ), name="name_value", - network=compute.PacketMirroringNetworkInfo( - canonical_url="canonical_url_value" - ), priority=898, region="region_value", self_link="self_link_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.PacketMirroring.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PacketMirroring.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.PacketMirroring) - assert response.collector_ilb == compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.enable == compute.PacketMirroring.Enable.FALSE - assert response.filter == compute.PacketMirroringFilter( - I_p_protocols=["I_p_protocols_value"] - ) assert response.id == 205 assert response.kind == "kind_value" - assert response.mirrored_resources == compute.PacketMirroringMirroredResourceInfo( - instances=[ - compute.PacketMirroringMirroredResourceInfoInstanceInfo( - canonical_url="canonical_url_value" - ) - ] - ) assert response.name == "name_value" - assert response.network == compute.PacketMirroringNetworkInfo( - canonical_url="canonical_url_value" - ) assert response.priority == 898 assert response.region == "region_value" assert response.self_link == "self_link_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetPacketMirroringRequest +): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -820,33 +865,44 @@ def test_get_rest_flattened(): return_value = compute.PacketMirroring() # Wrap the value into a proper Response obj - json_return_value = compute.PacketMirroring.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PacketMirroring.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", packet_mirroring="packet_mirroring_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "packet_mirroring_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -866,9 +922,14 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["packet_mirroring_resource"] = compute.PacketMirroring( + collector_ilb=compute.PacketMirroringForwardingRuleInfo( + canonical_url="canonical_url_value" + ) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -878,7 +939,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -896,14 +956,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -914,7 +973,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -932,18 +990,45 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertPacketMirroringRequest +): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy 
transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["packet_mirroring_resource"] = compute.PacketMirroring( + collector_ilb=compute.PacketMirroringForwardingRuleInfo( + canonical_url="canonical_url_value" + ) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -951,42 +1036,44 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - packet_mirroring_resource = compute.PacketMirroring( - collector_ilb=compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ) - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - packet_mirroring_resource=packet_mirroring_resource, + packet_mirroring_resource=compute.PacketMirroring( + collector_ilb=compute.PacketMirroringForwardingRuleInfo( + canonical_url="canonical_url_value" + ) + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.PacketMirroring.to_json( - packet_mirroring_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1010,32 +1097,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PacketMirroringList( id="id_value", - items=[ - compute.PacketMirroring( - collector_ilb=compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ) - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.PacketMirroringList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PacketMirroringList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1043,25 +1122,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.PacketMirroring( - collector_ilb=compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ) - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListPacketMirroringsRequest +): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1069,30 +1165,36 @@ def test_list_rest_flattened(): return_value = compute.PacketMirroringList() # Wrap the value into a proper Response obj - json_return_value = compute.PacketMirroringList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PacketMirroringList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1104,11 +1206,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.PacketMirroringList( @@ -1138,16 +1242,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.PacketMirroring) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1159,9 +1262,18 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + request_init["packet_mirroring_resource"] = compute.PacketMirroring( + collector_ilb=compute.PacketMirroringForwardingRuleInfo( + canonical_url="canonical_url_value" + ) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1171,7 +1283,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1189,14 +1300,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1207,7 +1317,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1225,18 +1334,49 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchPacketMirroringRequest +): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + request_init["packet_mirroring_resource"] = compute.PacketMirroring( + collector_ilb=compute.PacketMirroringForwardingRuleInfo( + canonical_url="canonical_url_value" + ) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1244,44 +1384,49 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - packet_mirroring_resource = compute.PacketMirroring( - collector_ilb=compute.PacketMirroringForwardingRuleInfo( - canonical_url="canonical_url_value" - ) - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "packet_mirroring": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", packet_mirroring="packet_mirroring_value", - packet_mirroring_resource=packet_mirroring_resource, + packet_mirroring_resource=compute.PacketMirroring( + collector_ilb=compute.PacketMirroringForwardingRuleInfo( + canonical_url="canonical_url_value" + ) + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "packet_mirroring_value" in http_call[1] + str(body) + str(params) - assert compute.PacketMirroring.to_json( - packet_mirroring_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{packet_mirroring}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1307,9 +1452,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1319,9 +1467,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1331,12 +1479,41 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsPacketMirroringRequest, +): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1344,42 +1521,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = PacketMirroringsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/packetMirrorings/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1477,8 +1659,10 @@ def test_packet_mirrorings_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_packet_mirrorings_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1502,29 +1686,6 @@ def test_packet_mirrorings_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_packet_mirrorings_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.packet_mirrorings.transports.PacketMirroringsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PacketMirroringsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_packet_mirrorings_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1536,7 +1697,6 @@ def test_packet_mirrorings_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_packet_mirrorings_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1552,21 +1712,6 @@ def test_packet_mirrorings_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_packet_mirrorings_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PacketMirroringsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_packet_mirrorings_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1713,3 +1858,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = PacketMirroringsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_projects.py b/tests/unit/gapic/compute_v1/test_projects.py index 52f73c597..a56bb40c9 100644 --- a/tests/unit/gapic/compute_v1/test_projects.py +++ b/tests/unit/gapic/compute_v1/test_projects.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.projects import ProjectsClient from google.cloud.compute_v1.services.projects import pagers from google.cloud.compute_v1.services.projects import transports -from google.cloud.compute_v1.services.projects.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,7 +169,7 @@ def test_projects_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,6 +178,7 @@ def test_projects_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -201,7 +186,7 @@ def test_projects_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -210,6 +195,7 @@ def test_projects_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -217,7 +203,7 @@ def test_projects_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -226,6 +212,7 @@ def test_projects_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,7 +232,7 @@ def test_projects_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,6 +241,7 @@ def test_projects_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -284,7 +272,7 @@ def test_projects_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as 
patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -301,6 +289,7 @@ def test_projects_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -325,7 +314,7 @@ def test_projects_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -334,6 +323,7 @@ def test_projects_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -346,7 +336,7 @@ def test_projects_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +345,7 @@ def test_projects_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -369,7 +360,7 @@ def test_projects_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +369,7 @@ def test_projects_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +384,7 @@ def test_projects_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -401,6 +393,7 @@ def test_projects_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -411,9 +404,9 @@ def test_disable_xpn_host_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -423,7 +416,6 @@ def test_disable_xpn_host_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -441,14 +433,13 @@ def test_disable_xpn_host_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.disable_xpn_host(request) @@ -459,7 +450,6 @@ def test_disable_xpn_host_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -477,18 +467,40 @@ def test_disable_xpn_host_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_disable_xpn_host_rest_bad_request( + transport: str = "rest", request_type=compute.DisableXpnHostProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_xpn_host(request) + + def test_disable_xpn_host_rest_from_dict(): test_disable_xpn_host_rest(request_type=dict) -def test_disable_xpn_host_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_disable_xpn_host_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -496,27 +508,36 @@ def test_disable_xpn_host_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.disable_xpn_host(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.disable_xpn_host(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/disableXpnHost" + % client.transport._host, + args[1], + ) -def test_disable_xpn_host_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_disable_xpn_host_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -533,9 +554,14 @@ def test_disable_xpn_resource_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_disable_xpn_resource_request_resource" + ] = compute.ProjectsDisableXpnResourceRequest( + xpn_resource=compute.XpnResourceId(id="id_value") + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -545,7 +571,6 @@ def test_disable_xpn_resource_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -563,14 +588,13 @@ def test_disable_xpn_resource_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.disable_xpn_resource(request) @@ -581,7 +605,6 @@ def test_disable_xpn_resource_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -599,18 +622,45 @@ def test_disable_xpn_resource_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_disable_xpn_resource_rest_bad_request( + transport: str = "rest", request_type=compute.DisableXpnResourceProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_disable_xpn_resource_request_resource" + ] = compute.ProjectsDisableXpnResourceRequest( + xpn_resource=compute.XpnResourceId(id="id_value") + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.disable_xpn_resource(request) + + def test_disable_xpn_resource_rest_from_dict(): test_disable_xpn_resource_rest(request_type=dict) -def test_disable_xpn_resource_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_disable_xpn_resource_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -618,38 +668,41 @@ def test_disable_xpn_resource_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - projects_disable_xpn_resource_request_resource = compute.ProjectsDisableXpnResourceRequest( - xpn_resource=compute.XpnResourceId(id="id_value") - ) - client.disable_xpn_resource( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - projects_disable_xpn_resource_request_resource=projects_disable_xpn_resource_request_resource, + projects_disable_xpn_resource_request_resource=compute.ProjectsDisableXpnResourceRequest( + xpn_resource=compute.XpnResourceId(id="id_value") + ), ) + mock_args.update(sample_request) + client.disable_xpn_resource(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.ProjectsDisableXpnResourceRequest.to_json( - projects_disable_xpn_resource_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_disable_xpn_resource_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/disableXpnResource" + % client.transport._host, + args[1], + ) + + +def test_disable_xpn_resource_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -670,9 +723,9 @@ def test_enable_xpn_host_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -682,7 +735,6 @@ def test_enable_xpn_host_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -700,14 +752,13 @@ def test_enable_xpn_host_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.enable_xpn_host(request) @@ -718,7 +769,6 @@ def test_enable_xpn_host_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -736,18 +786,40 @@ def test_enable_xpn_host_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_enable_xpn_host_rest_bad_request( + transport: str = "rest", request_type=compute.EnableXpnHostProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_xpn_host(request) + + def test_enable_xpn_host_rest_from_dict(): test_enable_xpn_host_rest(request_type=dict) -def test_enable_xpn_host_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_enable_xpn_host_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -755,27 +827,36 @@ def test_enable_xpn_host_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.enable_xpn_host(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.enable_xpn_host(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/enableXpnHost" + % client.transport._host, + args[1], + ) -def test_enable_xpn_host_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_enable_xpn_host_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -792,9 +873,14 @@ def test_enable_xpn_resource_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_enable_xpn_resource_request_resource" + ] = compute.ProjectsEnableXpnResourceRequest( + xpn_resource=compute.XpnResourceId(id="id_value") + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -804,7 +890,6 @@ def test_enable_xpn_resource_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -822,14 +907,13 @@ def test_enable_xpn_resource_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.enable_xpn_resource(request) @@ -840,7 +924,6 @@ def test_enable_xpn_resource_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -858,18 +941,45 @@ def test_enable_xpn_resource_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_enable_xpn_resource_rest_bad_request( + transport: str = "rest", request_type=compute.EnableXpnResourceProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_enable_xpn_resource_request_resource" + ] = compute.ProjectsEnableXpnResourceRequest( + xpn_resource=compute.XpnResourceId(id="id_value") + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.enable_xpn_resource(request) + + def test_enable_xpn_resource_rest_from_dict(): test_enable_xpn_resource_rest(request_type=dict) -def test_enable_xpn_resource_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_enable_xpn_resource_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -877,38 +987,41 @@ def test_enable_xpn_resource_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - projects_enable_xpn_resource_request_resource = compute.ProjectsEnableXpnResourceRequest( - xpn_resource=compute.XpnResourceId(id="id_value") - ) - client.enable_xpn_resource( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - projects_enable_xpn_resource_request_resource=projects_enable_xpn_resource_request_resource, + projects_enable_xpn_resource_request_resource=compute.ProjectsEnableXpnResourceRequest( + xpn_resource=compute.XpnResourceId(id="id_value") + ), ) + mock_args.update(sample_request) + client.enable_xpn_resource(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.ProjectsEnableXpnResourceRequest.to_json( - projects_enable_xpn_resource_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_enable_xpn_resource_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/enableXpnResource" + % client.transport._host, + args[1], + ) + + +def test_enable_xpn_resource_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -927,15 +1040,14 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetProjectReques credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.Project( - common_instance_metadata=compute.Metadata(fingerprint="fingerprint_value"), creation_timestamp="creation_timestamp_value", default_network_tier=compute.Project.DefaultNetworkTier.PREMIUM, default_service_account="default_service_account_value", @@ -944,27 +1056,20 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetProjectReques id=205, kind="kind_value", name="name_value", - quotas=[compute.Quota(limit=0.543)], self_link="self_link_value", - usage_export_location=compute.UsageExportLocation( - bucket_name="bucket_name_value" - ), xpn_project_status=compute.Project.XpnProjectStatus.HOST, ) # Wrap the value into a proper Response obj - json_return_value = compute.Project.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Project.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Project) - assert response.common_instance_metadata == compute.Metadata( - fingerprint="fingerprint_value" - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.default_network_tier == compute.Project.DefaultNetworkTier.PREMIUM assert response.default_service_account == "default_service_account_value" @@ -973,20 +1078,41 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetProjectReques assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.quotas == [compute.Quota(limit=0.543)] assert response.self_link == "self_link_value" - assert response.usage_export_location == compute.UsageExportLocation( - bucket_name="bucket_name_value" - ) assert response.xpn_project_status == compute.Project.XpnProjectStatus.HOST +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -994,27 +1120,34 @@ def test_get_rest_flattened(): return_value = compute.Project() # Wrap the value into a proper Response obj - json_return_value = compute.Project.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Project.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}" % client.transport._host, args[1] + ) -def test_get_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1031,15 +1164,14 @@ def test_get_xpn_host_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Project( - common_instance_metadata=compute.Metadata(fingerprint="fingerprint_value"), creation_timestamp="creation_timestamp_value", default_network_tier=compute.Project.DefaultNetworkTier.PREMIUM, default_service_account="default_service_account_value", @@ -1048,27 +1180,20 @@ def test_get_xpn_host_rest( id=205, kind="kind_value", name="name_value", - quotas=[compute.Quota(limit=0.543)], self_link="self_link_value", - usage_export_location=compute.UsageExportLocation( - bucket_name="bucket_name_value" - ), xpn_project_status=compute.Project.XpnProjectStatus.HOST, ) # Wrap the value into a proper Response obj - json_return_value = compute.Project.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Project.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_xpn_host(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Project) - assert response.common_instance_metadata == compute.Metadata( - fingerprint="fingerprint_value" - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.default_network_tier == compute.Project.DefaultNetworkTier.PREMIUM assert response.default_service_account == "default_service_account_value" @@ -1077,20 +1202,41 @@ def test_get_xpn_host_rest( assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.quotas == [compute.Quota(limit=0.543)] assert response.self_link == "self_link_value" - assert response.usage_export_location == compute.UsageExportLocation( - bucket_name="bucket_name_value" - ) assert response.xpn_project_status == compute.Project.XpnProjectStatus.HOST +def test_get_xpn_host_rest_bad_request( + transport: str = "rest", request_type=compute.GetXpnHostProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_xpn_host(request) + + def test_get_xpn_host_rest_from_dict(): test_get_xpn_host_rest(request_type=dict) -def test_get_xpn_host_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_xpn_host_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1098,27 +1244,36 @@ def test_get_xpn_host_rest_flattened(): return_value = compute.Project() # Wrap the value into a proper Response obj - json_return_value = compute.Project.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Project.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_xpn_host(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.get_xpn_host(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/getXpnHost" + % client.transport._host, + args[1], + ) -def test_get_xpn_host_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_xpn_host_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1135,23 +1290,21 @@ def test_get_xpn_resources_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ProjectsGetXpnResources( - kind="kind_value", - next_page_token="next_page_token_value", - resources=[compute.XpnResourceId(id="id_value")], + kind="kind_value", next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.ProjectsGetXpnResources.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ProjectsGetXpnResources.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_xpn_resources(request) @@ -1160,15 +1313,39 @@ def test_get_xpn_resources_rest( assert isinstance(response, pagers.GetXpnResourcesPager) assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" - assert response.resources == [compute.XpnResourceId(id="id_value")] + + +def test_get_xpn_resources_rest_bad_request( + transport: str = "rest", request_type=compute.GetXpnResourcesProjectsRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_xpn_resources(request) def test_get_xpn_resources_rest_from_dict(): test_get_xpn_resources_rest(request_type=dict) -def test_get_xpn_resources_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_xpn_resources_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1176,27 +1353,36 @@ def test_get_xpn_resources_rest_flattened(): return_value = compute.ProjectsGetXpnResources() # Wrap the value into a proper Response obj - json_return_value = compute.ProjectsGetXpnResources.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ProjectsGetXpnResources.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_xpn_resources(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.get_xpn_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/getXpnResources" + % client.transport._host, + args[1], + ) -def test_get_xpn_resources_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_xpn_resources_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1206,11 +1392,13 @@ def test_get_xpn_resources_rest_flattened_error(): ) -def test_get_xpn_resources_pager(): +def test_get_xpn_resources_rest_pager(): client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ProjectsGetXpnResources( @@ -1240,16 +1428,15 @@ def test_get_xpn_resources_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.get_xpn_resources(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.get_xpn_resources(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.XpnResourceId) for i in results) - pages = list(client.get_xpn_resources(request={}).pages) + pages = list(client.get_xpn_resources(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1261,32 +1448,27 @@ def test_list_xpn_hosts_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_list_xpn_hosts_request_resource" + ] = compute.ProjectsListXpnHostsRequest(organization="organization_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.XpnHostList( id="id_value", - items=[ - compute.Project( - common_instance_metadata=compute.Metadata( - fingerprint="fingerprint_value" - ) - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.XpnHostList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.XpnHostList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_xpn_hosts(request) @@ -1294,23 +1476,45 @@ def test_list_xpn_hosts_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListXpnHostsPager) assert response.id == "id_value" - assert response.items == [ - compute.Project( - common_instance_metadata=compute.Metadata(fingerprint="fingerprint_value") - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) -def test_list_xpn_hosts_rest_from_dict(): - test_list_xpn_hosts_rest(request_type=dict) +def test_list_xpn_hosts_rest_bad_request( + transport: str = "rest", request_type=compute.ListXpnHostsProjectsRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_list_xpn_hosts_request_resource" + ] = compute.ProjectsListXpnHostsRequest(organization="organization_value") + request = request_type(request_init) -def test_list_xpn_hosts_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_xpn_hosts(request) + + +def test_list_xpn_hosts_rest_from_dict(): + test_list_xpn_hosts_rest(request_type=dict) + + +def test_list_xpn_hosts_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1318,38 +1522,41 @@ def test_list_xpn_hosts_rest_flattened(): return_value = compute.XpnHostList() # Wrap the value into a proper Response obj - json_return_value = compute.XpnHostList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.XpnHostList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - projects_list_xpn_hosts_request_resource = compute.ProjectsListXpnHostsRequest( - organization="organization_value" - ) - client.list_xpn_hosts( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - projects_list_xpn_hosts_request_resource=projects_list_xpn_hosts_request_resource, + projects_list_xpn_hosts_request_resource=compute.ProjectsListXpnHostsRequest( + organization="organization_value" + ), ) + mock_args.update(sample_request) + client.list_xpn_hosts(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.ProjectsListXpnHostsRequest.to_json( - projects_list_xpn_hosts_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_list_xpn_hosts_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/listXpnHosts" + % client.transport._host, + args[1], + ) + + +def test_list_xpn_hosts_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1363,11 +1570,13 @@ def test_list_xpn_hosts_rest_flattened_error(): ) -def test_list_xpn_hosts_pager(): +def test_list_xpn_hosts_rest_pager(): client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.XpnHostList( @@ -1389,16 +1598,18 @@ def test_list_xpn_hosts_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_xpn_hosts(request={}) + sample_request = {"project": "sample1"} + sample_request[ + "projects_list_xpn_hosts_request_resource" + ] = compute.ProjectsListXpnHostsRequest(organization="organization_value") - assert pager._metadata == metadata + pager = client.list_xpn_hosts(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Project) for i in results) - pages = list(client.list_xpn_hosts(request={}).pages) + pages = list(client.list_xpn_hosts(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1410,9 +1621,12 @@ def test_move_disk_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["disk_move_request_resource"] = compute.DiskMoveRequest( + destination_zone="destination_zone_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1422,7 +1636,6 @@ def test_move_disk_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1440,14 +1653,13 @@ def test_move_disk_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.move_disk(request) @@ -1458,7 +1670,6 @@ def test_move_disk_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1476,18 +1687,43 @@ def test_move_disk_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_move_disk_rest_bad_request( + transport: str = "rest", request_type=compute.MoveDiskProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + 
request_init["disk_move_request_resource"] = compute.DiskMoveRequest( + destination_zone="destination_zone_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_disk(request) + + def test_move_disk_rest_from_dict(): test_move_disk_rest(request_type=dict) -def test_move_disk_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_move_disk_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1495,38 +1731,41 @@ def test_move_disk_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - disk_move_request_resource = compute.DiskMoveRequest( - destination_zone="destination_zone_value" - ) - client.move_disk( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - disk_move_request_resource=disk_move_request_resource, + disk_move_request_resource=compute.DiskMoveRequest( + destination_zone="destination_zone_value" + ), ) + mock_args.update(sample_request) + client.move_disk(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.DiskMoveRequest.to_json( - disk_move_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_move_disk_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/moveDisk" + % client.transport._host, + args[1], + ) + + +def test_move_disk_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1547,9 +1786,12 @@ def test_move_instance_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["instance_move_request_resource"] = compute.InstanceMoveRequest( + destination_zone="destination_zone_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1559,7 +1801,6 @@ def test_move_instance_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1577,14 +1818,13 @@ def test_move_instance_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.move_instance(request) @@ -1595,7 +1835,6 @@ def test_move_instance_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1613,18 +1852,43 @@ def test_move_instance_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_move_instance_rest_bad_request( + transport: str = "rest", request_type=compute.MoveInstanceProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["instance_move_request_resource"] = compute.InstanceMoveRequest( + destination_zone="destination_zone_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.move_instance(request) + + def test_move_instance_rest_from_dict(): test_move_instance_rest(request_type=dict) -def test_move_instance_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_move_instance_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1632,38 +1896,41 @@ def test_move_instance_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_move_request_resource = compute.InstanceMoveRequest( - destination_zone="destination_zone_value" - ) - client.move_instance( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - instance_move_request_resource=instance_move_request_resource, + instance_move_request_resource=compute.InstanceMoveRequest( + destination_zone="destination_zone_value" + ), ) + mock_args.update(sample_request) + client.move_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceMoveRequest.to_json( - instance_move_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_move_instance_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/moveInstance" + % client.transport._host, + args[1], + ) + + +def test_move_instance_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1685,9 +1952,12 @@ def test_set_common_instance_metadata_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["metadata_resource"] = compute.Metadata( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1697,7 +1967,6 @@ def test_set_common_instance_metadata_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1715,14 +1984,13 @@ def test_set_common_instance_metadata_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_common_instance_metadata(request) @@ -1733,7 +2001,6 @@ def test_set_common_instance_metadata_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1751,18 +2018,44 @@ def test_set_common_instance_metadata_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_common_instance_metadata_rest_bad_request( + transport: str = "rest", + request_type=compute.SetCommonInstanceMetadataProjectRequest, +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["metadata_resource"] = compute.Metadata( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_common_instance_metadata(request) + + def test_set_common_instance_metadata_rest_from_dict(): test_set_common_instance_metadata_rest(request_type=dict) -def test_set_common_instance_metadata_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_common_instance_metadata_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1770,35 +2063,39 @@ def test_set_common_instance_metadata_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - metadata_resource = compute.Metadata(fingerprint="fingerprint_value") - client.set_common_instance_metadata( - project="project_value", metadata_resource=metadata_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + metadata_resource=compute.Metadata(fingerprint="fingerprint_value"), ) + mock_args.update(sample_request) + client.set_common_instance_metadata(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.Metadata.to_json( - metadata_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_common_instance_metadata_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/setCommonInstanceMetadata" + % client.transport._host, + args[1], + ) + + +def test_set_common_instance_metadata_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1817,9 +2114,14 @@ def test_set_default_network_tier_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_set_default_network_tier_request_resource" + ] = compute.ProjectsSetDefaultNetworkTierRequest( + network_tier=compute.ProjectsSetDefaultNetworkTierRequest.NetworkTier.PREMIUM + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1829,7 +2131,6 @@ def test_set_default_network_tier_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1847,14 +2148,13 @@ def test_set_default_network_tier_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_default_network_tier(request) @@ -1865,7 +2165,6 @@ def test_set_default_network_tier_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1883,18 +2182,45 @@ def test_set_default_network_tier_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_default_network_tier_rest_bad_request( + transport: str = "rest", request_type=compute.SetDefaultNetworkTierProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init[ + "projects_set_default_network_tier_request_resource" + ] = compute.ProjectsSetDefaultNetworkTierRequest( + network_tier=compute.ProjectsSetDefaultNetworkTierRequest.NetworkTier.PREMIUM + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_default_network_tier(request) + + def test_set_default_network_tier_rest_from_dict(): test_set_default_network_tier_rest(request_type=dict) -def test_set_default_network_tier_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_default_network_tier_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1902,38 +2228,41 @@ def test_set_default_network_tier_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - projects_set_default_network_tier_request_resource = compute.ProjectsSetDefaultNetworkTierRequest( - network_tier=compute.ProjectsSetDefaultNetworkTierRequest.NetworkTier.PREMIUM - ) - client.set_default_network_tier( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - projects_set_default_network_tier_request_resource=projects_set_default_network_tier_request_resource, + projects_set_default_network_tier_request_resource=compute.ProjectsSetDefaultNetworkTierRequest( + network_tier=compute.ProjectsSetDefaultNetworkTierRequest.NetworkTier.PREMIUM + ), ) + mock_args.update(sample_request) + client.set_default_network_tier(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.ProjectsSetDefaultNetworkTierRequest.to_json( - projects_set_default_network_tier_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_default_network_tier_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/setDefaultNetworkTier" + % client.transport._host, + args[1], + ) + + +def test_set_default_network_tier_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1954,9 +2283,12 @@ def test_set_usage_export_bucket_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["usage_export_location_resource"] = compute.UsageExportLocation( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1966,7 +2298,6 @@ def test_set_usage_export_bucket_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1984,14 +2315,13 @@ def test_set_usage_export_bucket_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_usage_export_bucket(request) @@ -2002,7 +2332,6 @@ def test_set_usage_export_bucket_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2020,18 +2349,43 @@ def test_set_usage_export_bucket_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_usage_export_bucket_rest_bad_request( + transport: str = "rest", request_type=compute.SetUsageExportBucketProjectRequest +): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["usage_export_location_resource"] = compute.UsageExportLocation( + bucket_name="bucket_name_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_usage_export_bucket(request) + + def test_set_usage_export_bucket_rest_from_dict(): test_set_usage_export_bucket_rest(request_type=dict) -def test_set_usage_export_bucket_rest_flattened(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_usage_export_bucket_rest_flattened(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2039,38 +2393,41 @@ def test_set_usage_export_bucket_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - usage_export_location_resource = compute.UsageExportLocation( - bucket_name="bucket_name_value" - ) - client.set_usage_export_bucket( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - usage_export_location_resource=usage_export_location_resource, + usage_export_location_resource=compute.UsageExportLocation( + bucket_name="bucket_name_value" + ), ) + mock_args.update(sample_request) + client.set_usage_export_bucket(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.UsageExportLocation.to_json( - usage_export_location_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_usage_export_bucket_rest_flattened_error(): - client = ProjectsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/setUsageExportBucket" + % client.transport._host, + args[1], + ) + + +def test_set_usage_export_bucket_rest_flattened_error(transport: str = "rest"): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -2172,8 +2529,10 @@ def test_projects_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_projects_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2197,29 +2556,6 @@ def test_projects_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_projects_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.projects.transports.ProjectsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ProjectsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_projects_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2231,7 +2567,6 @@ def test_projects_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_projects_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2247,21 +2582,6 @@ def test_projects_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_projects_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ProjectsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_projects_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2408,3 +2728,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ProjectsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py b/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py index a37158ba4..c67c83d2e 100644 --- a/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py +++ b/tests/unit/gapic/compute_v1/test_public_advertised_prefixes.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.public_advertised_prefixes import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.public_advertised_prefixes import pagers from google.cloud.compute_v1.services.public_advertised_prefixes import transports -from google.cloud.compute_v1.services.public_advertised_prefixes.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -208,7 +192,7 @@ def test_public_advertised_prefixes_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -217,6 +201,7 @@ def test_public_advertised_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -224,7 +209,7 @@ def test_public_advertised_prefixes_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -233,6 +218,7 @@ def test_public_advertised_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -240,7 +226,7 @@ def test_public_advertised_prefixes_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -249,6 +235,7 @@ def test_public_advertised_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -268,7 +255,7 @@ def test_public_advertised_prefixes_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -277,6 +264,7 @@ def test_public_advertised_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -319,7 +307,7 @@ def test_public_advertised_prefixes_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if 
use_client_cert_env == "false": expected_client_cert_source = None @@ -336,6 +324,7 @@ def test_public_advertised_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -360,7 +349,7 @@ def test_public_advertised_prefixes_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -369,6 +358,7 @@ def test_public_advertised_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -381,7 +371,7 @@ def test_public_advertised_prefixes_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -390,6 +380,7 @@ def test_public_advertised_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -410,7 +401,7 @@ def test_public_advertised_prefixes_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -419,6 +410,7 @@ def test_public_advertised_prefixes_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -439,7 +431,7 @@ def test_public_advertised_prefixes_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -448,6 +440,7 @@ def test_public_advertised_prefixes_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -458,9 +451,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -470,7 +463,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -488,14 +480,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -506,7 +497,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -524,19 +514,39 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeletePublicAdvertisedPrefixeRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -545,34 +555,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", public_advertised_prefix="public_advertised_prefix_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "public_advertised_prefix_value" in http_call[1] + str(body) + str( - params + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + % client.transport._host, + args[1], ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -592,9 +606,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -608,20 +622,15 @@ def test_get_rest( ip_cidr_range="ip_cidr_range_value", kind="kind_value", name="name_value", - public_delegated_prefixs=[ - compute.PublicAdvertisedPrefixPublicDelegatedPrefix( - ip_range="ip_range_value" - ) - ], self_link="self_link_value", shared_secret="shared_secret_value", status=compute.PublicAdvertisedPrefix.Status.INITIAL, ) # Wrap the value into a proper Response obj - json_return_value = compute.PublicAdvertisedPrefix.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicAdvertisedPrefix.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -636,21 +645,41 @@ def test_get_rest( assert response.ip_cidr_range == "ip_cidr_range_value" assert response.kind == "kind_value" assert response.name == "name_value" - assert response.public_delegated_prefixs == [ - compute.PublicAdvertisedPrefixPublicDelegatedPrefix(ip_range="ip_range_value") - ] assert response.self_link == "self_link_value" assert response.shared_secret == "shared_secret_value" assert response.status == compute.PublicAdvertisedPrefix.Status.INITIAL +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetPublicAdvertisedPrefixeRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -659,34 +688,38 @@ def test_get_rest_flattened(): return_value = compute.PublicAdvertisedPrefix() # Wrap the value into a proper Response obj - json_return_value = compute.PublicAdvertisedPrefix.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicAdvertisedPrefix.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", public_advertised_prefix="public_advertised_prefix_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "public_advertised_prefix_value" in http_call[1] + str(body) + str( - params + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + % client.transport._host, + args[1], ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -706,9 +739,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["public_advertised_prefix_resource"] = compute.PublicAdvertisedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -718,7 +754,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -736,14 +771,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -754,7 +788,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -772,19 +805,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertPublicAdvertisedPrefixeRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["public_advertised_prefix_resource"] = compute.PublicAdvertisedPrefix( + 
creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -793,39 +849,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - public_advertised_prefix_resource = compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - public_advertised_prefix_resource=public_advertised_prefix_resource, + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.PublicAdvertisedPrefix.to_json( - public_advertised_prefix_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -847,30 +904,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PublicAdvertisedPrefixList( id="id_value", - items=[ - compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.PublicAdvertisedPrefixList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicAdvertisedPrefixList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -878,22 +929,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.PublicAdvertisedPrefix(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListPublicAdvertisedPrefixesRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -902,28 +972,35 @@ def test_list_rest_flattened(): return_value = compute.PublicAdvertisedPrefixList() # Wrap the value into a proper Response obj - json_return_value = compute.PublicAdvertisedPrefixList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicAdvertisedPrefixList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -934,13 +1011,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = PublicAdvertisedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.PublicAdvertisedPrefixList( @@ -975,16 +1054,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.PublicAdvertisedPrefix) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -996,9 +1074,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request_init["public_advertised_prefix_resource"] = compute.PublicAdvertisedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1008,7 +1089,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1026,14 +1106,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1044,7 +1123,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1062,19 +1140,42 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchPublicAdvertisedPrefixeRequest +): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "public_advertised_prefix": "sample2"} + request_init["public_advertised_prefix_resource"] = compute.PublicAdvertisedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1083,43 +1184,41 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- public_advertised_prefix_resource = compute.PublicAdvertisedPrefix( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "public_advertised_prefix": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", public_advertised_prefix="public_advertised_prefix_value", - public_advertised_prefix_resource=public_advertised_prefix_resource, + public_advertised_prefix_resource=compute.PublicAdvertisedPrefix( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "public_advertised_prefix_value" in http_call[1] + str(body) + str( - params + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/publicAdvertisedPrefixes/{public_advertised_prefix}" + % client.transport._host, + args[1], ) - assert compute.PublicAdvertisedPrefix.to_json( - public_advertised_prefix_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) -def test_patch_rest_flattened_error(): +def test_patch_rest_flattened_error(transport: str = "rest"): client = PublicAdvertisedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1217,8 +1316,10 @@ def test_public_advertised_prefixes_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_public_advertised_prefixes_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1242,29 +1343,6 @@ def test_public_advertised_prefixes_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_public_advertised_prefixes_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.public_advertised_prefixes.transports.PublicAdvertisedPrefixesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PublicAdvertisedPrefixesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_public_advertised_prefixes_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1276,7 +1354,6 @@ def test_public_advertised_prefixes_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_public_advertised_prefixes_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1292,21 +1369,6 @@ def test_public_advertised_prefixes_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_public_advertised_prefixes_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PublicAdvertisedPrefixesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_public_advertised_prefixes_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1453,3 +1515,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = PublicAdvertisedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py b/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py index cc8ee28f4..64bf1101e 100644 --- a/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py +++ b/tests/unit/gapic/compute_v1/test_public_delegated_prefixes.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.public_delegated_prefixes import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.public_delegated_prefixes import pagers from google.cloud.compute_v1.services.public_delegated_prefixes import transports -from google.cloud.compute_v1.services.public_delegated_prefixes.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_public_delegated_prefixes_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_public_delegated_prefixes_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_public_delegated_prefixes_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_public_delegated_prefixes_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_public_delegated_prefixes_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_public_delegated_prefixes_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if 
use_client_cert_env == "false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_public_delegated_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_public_delegated_prefixes_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_public_delegated_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_public_delegated_prefixes_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_public_delegated_prefixes_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_public_delegated_prefixes_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_public_delegated_prefixes_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_public_delegated_prefixes_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_public_delegated_prefixes_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -455,37 +448,27 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.PublicDelegatedPrefixAggregatedList( id="id_value", - items={ - "key_value": compute.PublicDelegatedPrefixesScopedList( - public_delegated_prefixes=[ - compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.PublicDelegatedPrefixAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -493,29 +476,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.PublicDelegatedPrefixesScopedList( - public_delegated_prefixes=[ - compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", + request_type=compute.AggregatedListPublicDelegatedPrefixesRequest, +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): +def test_aggregated_list_rest_flattened(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -524,30 +521,37 @@ def test_aggregated_list_rest_flattened(): return_value = compute.PublicDelegatedPrefixAggregatedList() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.PublicDelegatedPrefixAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/publicDelegatedPrefixes" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -559,13 +563,15 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.PublicDelegatedPrefixAggregatedList( @@ -603,10 +609,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.PublicDelegatedPrefixesScopedList) assert pager.get("h") is None @@ -624,7 +629,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.PublicDelegatedPrefixesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -636,9 +641,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -648,7 +657,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -666,14 +674,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -684,7 +691,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -702,19 +708,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeletePublicDelegatedPrefixeRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -723,34 +753,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "public_delegated_prefix_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -771,9 +810,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -788,20 +831,15 @@ def test_get_rest( kind="kind_value", name="name_value", parent_prefix="parent_prefix_value", - public_delegated_sub_prefixs=[ - compute.PublicDelegatedPrefixPublicDelegatedSubPrefix( - delegatee_project="delegatee_project_value" - ) - ], region="region_value", self_link="self_link_value", status=compute.PublicDelegatedPrefix.Status.ANNOUNCED, ) # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -817,23 +855,45 @@ def test_get_rest( assert response.kind == "kind_value" assert response.name == "name_value" assert response.parent_prefix == "parent_prefix_value" - assert response.public_delegated_sub_prefixs == [ - compute.PublicDelegatedPrefixPublicDelegatedSubPrefix( - delegatee_project="delegatee_project_value" - ) - ] assert response.region == "region_value" assert response.self_link == "self_link_value" assert response.status == compute.PublicDelegatedPrefix.Status.ANNOUNCED +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetPublicDelegatedPrefixeRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -842,34 +902,43 @@ def test_get_rest_flattened(): return_value = compute.PublicDelegatedPrefix() # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefix.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "public_delegated_prefix_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -890,9 +959,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -902,7 +974,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -920,14 +991,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -938,7 +1008,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -956,19 +1025,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertPublicDelegatedPrefixeRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -977,41 +1069,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- public_delegated_prefix_resource = compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - public_delegated_prefix_resource=public_delegated_prefix_resource, + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.PublicDelegatedPrefix.to_json( - public_delegated_prefix_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1034,30 +1126,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.PublicDelegatedPrefixList( id="id_value", - items=[ - compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1065,22 +1151,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.PublicDelegatedPrefix(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListPublicDelegatedPrefixesRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1089,31 +1194,35 @@ def test_list_rest_flattened(): return_value = compute.PublicDelegatedPrefixList() # Wrap the value into a proper Response obj - json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.PublicDelegatedPrefixList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1126,13 +1235,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = PublicDelegatedPrefixesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.PublicDelegatedPrefixList( @@ -1165,16 +1276,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.PublicDelegatedPrefix) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1186,9 +1296,16 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1198,7 +1315,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1216,14 +1332,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1234,7 +1349,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1252,19 +1366,46 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchPublicDelegatedPrefixeRequest +): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + request_init["public_delegated_prefix_resource"] = compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1273,43 +1414,46 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- public_delegated_prefix_resource = compute.PublicDelegatedPrefix( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "public_delegated_prefix": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", public_delegated_prefix="public_delegated_prefix_value", - public_delegated_prefix_resource=public_delegated_prefix_resource, + public_delegated_prefix_resource=compute.PublicDelegatedPrefix( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "public_delegated_prefix_value" in http_call[1] + str(body) + str(params) - assert compute.PublicDelegatedPrefix.to_json( - public_delegated_prefix_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/publicDelegatedPrefixes/{public_delegated_prefix}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = PublicDelegatedPrefixesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1409,8 +1553,10 @@ def test_public_delegated_prefixes_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_public_delegated_prefixes_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1434,29 +1580,6 @@ def test_public_delegated_prefixes_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_public_delegated_prefixes_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.public_delegated_prefixes.transports.PublicDelegatedPrefixesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.PublicDelegatedPrefixesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_public_delegated_prefixes_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1468,7 +1591,6 @@ def test_public_delegated_prefixes_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_public_delegated_prefixes_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1484,21 +1606,6 @@ def test_public_delegated_prefixes_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_public_delegated_prefixes_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - PublicDelegatedPrefixesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_public_delegated_prefixes_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1645,3 +1752,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = PublicDelegatedPrefixesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_autoscalers.py b/tests/unit/gapic/compute_v1/test_region_autoscalers.py index a1702fecf..7db0bed21 100644 --- a/tests/unit/gapic/compute_v1/test_region_autoscalers.py +++ b/tests/unit/gapic/compute_v1/test_region_autoscalers.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_autoscalers import RegionAutoscalersClient from google.cloud.compute_v1.services.region_autoscalers import pagers from google.cloud.compute_v1.services.region_autoscalers import transports -from google.cloud.compute_v1.services.region_autoscalers.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_region_autoscalers_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_region_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_region_autoscalers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_region_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case 
api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_region_autoscalers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_region_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_region_autoscalers_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_region_autoscalers_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_region_autoscalers_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_region_autoscalers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_region_autoscalers_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_region_autoscalers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -369,7 +359,7 @@ def test_region_autoscalers_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_region_autoscalers_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_region_autoscalers_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_region_autoscalers_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_region_autoscalers_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_region_autoscalers_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -446,7 +439,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -464,14 +456,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -482,7 +473,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -500,18 +490,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionAutoscalerRequest +): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -519,33 +531,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "autoscaler": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", autoscaler="autoscaler_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "autoscaler_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -565,15 +588,14 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112), creation_timestamp="creation_timestamp_value", description="description_value", id=205, @@ -581,31 +603,22 @@ def test_get_rest( name="name_value", recommended_size=1693, region="region_value", - scaling_schedule_status={ - "key_value": compute.ScalingScheduleStatus( - last_start_time="last_start_time_value" - ) - }, self_link="self_link_value", status=compute.Autoscaler.Status.ACTIVE, - status_details=[compute.AutoscalerStatusDetails(message="message_value")], target="target_value", zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Autoscaler.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Autoscaler.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Autoscaler) - assert response.autoscaling_policy == compute.AutoscalingPolicy( - cool_down_period_sec=2112 - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.id == 205 @@ -613,26 +626,43 @@ def test_get_rest( assert response.name == "name_value" assert response.recommended_size == 1693 assert response.region == "region_value" - assert response.scaling_schedule_status == { - "key_value": compute.ScalingScheduleStatus( - last_start_time="last_start_time_value" - ) - } assert response.self_link == "self_link_value" assert response.status == compute.Autoscaler.Status.ACTIVE - assert response.status_details == [ - compute.AutoscalerStatusDetails(message="message_value") - ] assert response.target == "target_value" assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionAutoscalerRequest +): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "autoscaler": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -640,33 +670,44 @@ def test_get_rest_flattened(): return_value = compute.Autoscaler() # Wrap the value into a proper Response obj - json_return_value = compute.Autoscaler.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Autoscaler.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "autoscaler": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", autoscaler="autoscaler_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "autoscaler_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/autoscalers/{autoscaler}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -686,9 +727,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -698,7 +742,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -716,14 +759,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -734,7 +776,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -752,18 +793,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionAutoscalerRequest +): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", 
"region": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -771,40 +837,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - autoscaler_resource = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - autoscaler_resource=autoscaler_resource, + autoscaler_resource=compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Autoscaler.to_json( - autoscaler_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/autoscalers" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -826,32 +894,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.RegionAutoscalerList( id="id_value", - items=[ - compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy( - cool_down_period_sec=2112 - ) - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RegionAutoscalerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionAutoscalerList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -859,23 +919,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionAutoscalersRequest +): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -883,30 +962,36 @@ def test_list_rest_flattened(): return_value = compute.RegionAutoscalerList() # Wrap the value into a proper Response obj - json_return_value = compute.RegionAutoscalerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionAutoscalerList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/autoscalers" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -918,11 +1003,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionAutoscalerList( @@ -952,16 +1039,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Autoscaler) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -973,9 +1059,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -985,7 +1074,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1003,14 +1091,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1021,7 +1108,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1039,18 +1125,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionAutoscalerRequest +): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1058,40 +1169,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - autoscaler_resource = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - autoscaler_resource=autoscaler_resource, + autoscaler_resource=compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Autoscaler.to_json( - autoscaler_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/autoscalers" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1113,9 +1226,12 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1125,7 +1241,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1143,14 +1258,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1161,7 +1275,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1179,18 +1292,43 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateRegionAutoscalerRequest +): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["autoscaler_resource"] = compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1198,40 +1336,42 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - autoscaler_resource = compute.Autoscaler( - autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) - ) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - autoscaler_resource=autoscaler_resource, + autoscaler_resource=compute.Autoscaler( + autoscaling_policy=compute.AutoscalingPolicy(cool_down_period_sec=2112) + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Autoscaler.to_json( - autoscaler_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = RegionAutoscalersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/autoscalers" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1329,8 +1469,10 @@ def test_region_autoscalers_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_autoscalers_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1354,29 +1496,6 @@ def test_region_autoscalers_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_autoscalers_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_autoscalers.transports.RegionAutoscalersTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionAutoscalersTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_autoscalers_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1388,7 +1507,6 @@ def test_region_autoscalers_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_autoscalers_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1404,21 +1522,6 @@ def test_region_autoscalers_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_autoscalers_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionAutoscalersClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_autoscalers_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1565,3 +1668,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionAutoscalersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_backend_services.py b/tests/unit/gapic/compute_v1/test_region_backend_services.py index 3121e0cfb..d4eb7d9d0 100644 --- a/tests/unit/gapic/compute_v1/test_region_backend_services.py +++ b/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_backend_services import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_backend_services import pagers from google.cloud.compute_v1.services.region_backend_services import transports -from google.cloud.compute_v1.services.region_backend_services.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_region_backend_services_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_region_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_region_backend_services_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_region_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_region_backend_services_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_region_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_region_backend_services_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_region_backend_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_region_backend_services_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == 
"false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_region_backend_services_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_region_backend_services_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_region_backend_services_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_region_backend_services_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_region_backend_services_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_region_backend_services_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_region_backend_services_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_region_backend_services_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_region_backend_services_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -454,9 +447,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -466,7 +463,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -484,14 +480,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -502,7 +497,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -520,19 +514,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -541,34 +559,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", backend_service="backend_service_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -589,74 +616,46 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.BackendService( affinity_cookie_ttl_sec=2432, - backends=[ - compute.Backend(balancing_mode=compute.Backend.BalancingMode.CONNECTION) - ], - cdn_policy=compute.BackendServiceCdnPolicy( - bypass_cache_on_request_headers=[ - compute.BackendServiceCdnPolicyBypassCacheOnRequestHeader( - header_name="header_name_value" - ) - ] - ), - circuit_breakers=compute.CircuitBreakers(max_connections=1608), - connection_draining=compute.ConnectionDraining(draining_timeout_sec=2124), - consistent_hash=compute.ConsistentHashLoadBalancerSettings( - http_cookie=compute.ConsistentHashLoadBalancerSettingsHttpCookie( - name="name_value" - ) - ), creation_timestamp="creation_timestamp_value", custom_request_headers=["custom_request_headers_value"], custom_response_headers=["custom_response_headers_value"], description="description_value", enable_c_d_n=True, - failover_policy=compute.BackendServiceFailoverPolicy( - disable_connection_drain_on_failover=True - ), fingerprint="fingerprint_value", health_checks=["health_checks_value"], - iap=compute.BackendServiceIAP(enabled=True), id=205, kind="kind_value", load_balancing_scheme=compute.BackendService.LoadBalancingScheme.EXTERNAL, locality_lb_policy=compute.BackendService.LocalityLbPolicy.INVALID_LB_POLICY, - log_config=compute.BackendServiceLogConfig(enable=True), - max_stream_duration=compute.Duration(nanos=543), name="name_value", network="network_value", - outlier_detection=compute.OutlierDetection( - base_ejection_time=compute.Duration(nanos=543) - ), port=453, port_name="port_name_value", protocol=compute.BackendService.Protocol.GRPC, region="region_value", security_policy="security_policy_value", - security_settings=compute.SecuritySettings( - client_tls_policy="client_tls_policy_value" - ), self_link="self_link_value", session_affinity=compute.BackendService.SessionAffinity.CLIENT_IP, - subsetting=compute.Subsetting( - policy=compute.Subsetting.Policy.CONSISTENT_HASH_SUBSETTING - ), timeout_sec=1185, ) # Wrap the value into a proper Response obj - json_return_value = compute.BackendService.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendService.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -664,36 +663,13 @@ def test_get_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, compute.BackendService) assert response.affinity_cookie_ttl_sec == 2432 - assert response.backends == [ - compute.Backend(balancing_mode=compute.Backend.BalancingMode.CONNECTION) - ] - assert response.cdn_policy == compute.BackendServiceCdnPolicy( - bypass_cache_on_request_headers=[ - compute.BackendServiceCdnPolicyBypassCacheOnRequestHeader( - header_name="header_name_value" - ) - ] - ) - assert response.circuit_breakers == compute.CircuitBreakers(max_connections=1608) - assert response.connection_draining == compute.ConnectionDraining( - draining_timeout_sec=2124 - ) - assert response.consistent_hash == compute.ConsistentHashLoadBalancerSettings( - http_cookie=compute.ConsistentHashLoadBalancerSettingsHttpCookie( - name="name_value" - ) - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.custom_request_headers == ["custom_request_headers_value"] assert response.custom_response_headers == ["custom_response_headers_value"] assert response.description == "description_value" assert response.enable_c_d_n is True - assert response.failover_policy == compute.BackendServiceFailoverPolicy( - disable_connection_drain_on_failover=True - ) assert response.fingerprint == "fingerprint_value" assert response.health_checks == ["health_checks_value"] - assert response.iap == compute.BackendServiceIAP(enabled=True) assert response.id == 205 assert response.kind == "kind_value" assert ( @@ -704,36 +680,52 @@ def test_get_rest( response.locality_lb_policy == compute.BackendService.LocalityLbPolicy.INVALID_LB_POLICY ) - assert response.log_config == compute.BackendServiceLogConfig(enable=True) - assert response.max_stream_duration == compute.Duration(nanos=543) assert response.name == "name_value" assert response.network == "network_value" - assert response.outlier_detection == compute.OutlierDetection( - base_ejection_time=compute.Duration(nanos=543) - ) assert response.port == 453 assert response.port_name == "port_name_value" assert response.protocol == compute.BackendService.Protocol.GRPC assert response.region == "region_value" assert response.security_policy == "security_policy_value" - assert response.security_settings == compute.SecuritySettings( - client_tls_policy="client_tls_policy_value" - ) assert response.self_link == "self_link_value" assert response.session_affinity == compute.BackendService.SessionAffinity.CLIENT_IP - assert response.subsetting == compute.Subsetting( - policy=compute.Subsetting.Policy.CONSISTENT_HASH_SUBSETTING - ) assert response.timeout_sec == 1185 +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -742,34 +734,43 @@ def test_get_rest_flattened(): return_value = compute.BackendService() # Wrap the value into a proper Response obj - json_return_value = compute.BackendService.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendService.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", backend_service="backend_service_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -790,45 +791,72 @@ def test_get_health_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["resource_group_reference_resource"] = compute.ResourceGroupReference( + group="group_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.BackendServiceGroupHealth( - annotations={"key_value": "value_value"}, - health_status=[ - compute.HealthStatus(annotations={"key_value": "value_value"}) - ], - kind="kind_value", - ) + return_value = compute.BackendServiceGroupHealth(kind="kind_value",) # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_health(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.BackendServiceGroupHealth) - assert response.annotations == {"key_value": "value_value"} - assert response.health_status == [ - compute.HealthStatus(annotations={"key_value": "value_value"}) - ] assert response.kind == "kind_value" +def test_get_health_rest_bad_request( + transport: str = "rest", request_type=compute.GetHealthRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["resource_group_reference_resource"] = compute.ResourceGroupReference( + group="group_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_health(request) + + def test_get_health_rest_from_dict(): test_get_health_rest(request_type=dict) -def test_get_health_rest_flattened(): +def test_get_health_rest_flattened(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -837,43 +865,46 @@ def test_get_health_rest_flattened(): return_value = compute.BackendServiceGroupHealth() # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceGroupHealth.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- resource_group_reference_resource = compute.ResourceGroupReference( - group="group_value" - ) - client.get_health( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", backend_service="backend_service_value", - resource_group_reference_resource=resource_group_reference_resource, + resource_group_reference_resource=compute.ResourceGroupReference( + group="group_value" + ), ) + mock_args.update(sample_request) + client.get_health(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.ResourceGroupReference.to_json( - resource_group_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_get_health_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}/getHealth" + % client.transport._host, + args[1], + ) + + +def test_get_health_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -897,9 +928,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -909,7 +943,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -927,14 +960,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -945,7 +977,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -963,19 +994,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -984,39 +1038,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- backend_service_resource = compute.BackendService(affinity_cookie_ttl_sec=2432) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - backend_service_resource=backend_service_resource, + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.BackendService.to_json( - backend_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/backendServices" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1039,26 +1095,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.BackendServiceList( id="id_value", - items=[compute.BackendService(affinity_cookie_ttl_sec=2432)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1066,20 +1120,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.BackendService(affinity_cookie_ttl_sec=2432)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionBackendServicesRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1088,31 +1163,35 @@ def test_list_rest_flattened(): return_value = compute.BackendServiceList() # Wrap the value into a proper Response obj - json_return_value = compute.BackendServiceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.BackendServiceList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/backendServices" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1125,13 +1204,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionBackendServicesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.BackendServiceList( @@ -1161,16 +1242,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.BackendService) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1182,9 +1262,16 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1194,7 +1281,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1212,14 +1298,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1230,7 +1315,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1248,19 +1332,46 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1269,41 +1380,46 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- backend_service_resource = compute.BackendService(affinity_cookie_ttl_sec=2432) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", backend_service="backend_service_value", - backend_service_resource=backend_service_resource, + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.BackendService.to_json( - backend_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1327,9 +1443,16 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1339,7 +1462,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1357,14 +1479,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1375,7 +1496,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1393,19 +1513,46 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateRegionBackendServiceRequest +): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + request_init["backend_service_resource"] = compute.BackendService( + affinity_cookie_ttl_sec=2432 + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): +def test_update_rest_flattened(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1414,41 +1561,46 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- backend_service_resource = compute.BackendService(affinity_cookie_ttl_sec=2432) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", backend_service="backend_service_value", - backend_service_resource=backend_service_resource, + backend_service_resource=compute.BackendService( + affinity_cookie_ttl_sec=2432 + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "backend_service_value" in http_call[1] + str(body) + str(params) - assert compute.BackendService.to_json( - backend_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/backendServices/{backend_service}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): client = RegionBackendServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1549,8 +1701,10 @@ def test_region_backend_services_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_backend_services_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1574,29 +1728,6 @@ def test_region_backend_services_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_backend_services_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_backend_services.transports.RegionBackendServicesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionBackendServicesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_backend_services_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1608,7 +1739,6 @@ def test_region_backend_services_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_backend_services_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1624,21 +1754,6 @@ def test_region_backend_services_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_backend_services_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionBackendServicesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_backend_services_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1785,3 +1900,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionBackendServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_commitments.py b/tests/unit/gapic/compute_v1/test_region_commitments.py index 8f7db121b..67bfe9d82 100644 --- a/tests/unit/gapic/compute_v1/test_region_commitments.py +++ b/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_commitments import RegionCommitmentsClient from google.cloud.compute_v1.services.region_commitments import pagers from google.cloud.compute_v1.services.region_commitments import transports -from google.cloud.compute_v1.services.region_commitments.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_region_commitments_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_region_commitments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_region_commitments_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_region_commitments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case 
api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_region_commitments_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_region_commitments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_region_commitments_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_region_commitments_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_region_commitments_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_region_commitments_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_region_commitments_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_region_commitments_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -369,7 +359,7 @@ def test_region_commitments_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_region_commitments_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_region_commitments_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_region_commitments_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_region_commitments_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_region_commitments_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,35 +427,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.CommitmentAggregatedList( id="id_value", - items={ - "key_value": compute.CommitmentsScopedList( - commitments=[ - compute.Commitment( - category=compute.Commitment.Category.CATEGORY_UNSPECIFIED - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.CommitmentAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.CommitmentAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -470,28 +453,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.CommitmentsScopedList( - commitments=[ - compute.Commitment( - category=compute.Commitment.Category.CATEGORY_UNSPECIFIED - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListRegionCommitmentsRequest +): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -499,27 +497,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.CommitmentAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.CommitmentAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.CommitmentAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
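+        # args[1] is the URL seen by the mocked Session.request; it is validated against the method's HTTP path template rather than string-matched.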
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/commitments" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -529,11 +536,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.CommitmentAggregatedList( @@ -566,10 +575,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.CommitmentsScopedList) assert pager.get("h") is None @@ -587,7 +595,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.CommitmentsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -599,9 +607,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "commitment": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
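+    # The mocked Session.request returns a canned 200 response whose body is the JSON-serialized Commitment built below.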
with mock.patch.object(Session, "request") as req: @@ -613,14 +621,9 @@ def test_get_rest( end_timestamp="end_timestamp_value", id=205, kind="kind_value", - license_resource=compute.LicenseResourceCommitment(amount=660), name="name_value", plan=compute.Commitment.Plan.INVALID, region="region_value", - reservations=[compute.Reservation(commitment="commitment_value")], - resources=[ - compute.ResourceCommitment(accelerator_type="accelerator_type_value") - ], self_link="self_link_value", start_timestamp="start_timestamp_value", status=compute.Commitment.Status.ACTIVE, @@ -629,9 +632,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.Commitment.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Commitment.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -644,14 +647,9 @@ def test_get_rest( assert response.end_timestamp == "end_timestamp_value" assert response.id == 205 assert response.kind == "kind_value" - assert response.license_resource == compute.LicenseResourceCommitment(amount=660) assert response.name == "name_value" assert response.plan == compute.Commitment.Plan.INVALID assert response.region == "region_value" - assert response.reservations == [compute.Reservation(commitment="commitment_value")] - assert response.resources == [ - compute.ResourceCommitment(accelerator_type="accelerator_type_value") - ] assert response.self_link == "self_link_value" assert response.start_timestamp == "start_timestamp_value" assert response.status == compute.Commitment.Status.ACTIVE @@ -659,12 +657,37 @@ def test_get_rest( assert response.type_ == compute.Commitment.Type.ACCELERATOR_OPTIMIZED +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionCommitmentRequest +): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "commitment": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -672,33 +695,44 @@ def test_get_rest_flattened(): return_value = compute.Commitment() # Wrap the value into a proper Response obj - json_return_value = compute.Commitment.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Commitment.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "commitment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", commitment="commitment_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "commitment_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/commitments/{commitment}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -718,9 +752,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["commitment_resource"] = compute.Commitment( + category=compute.Commitment.Category.CATEGORY_UNSPECIFIED + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -730,7 +767,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -748,14 +784,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -766,7 +801,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -784,18 +818,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionCommitmentRequest +): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["commitment_resource"] = compute.Commitment( + category=compute.Commitment.Category.CATEGORY_UNSPECIFIED + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -803,40 +862,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- commitment_resource = compute.Commitment( - category=compute.Commitment.Category.CATEGORY_UNSPECIFIED - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - commitment_resource=commitment_resource, + commitment_resource=compute.Commitment( + category=compute.Commitment.Category.CATEGORY_UNSPECIFIED + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Commitment.to_json( - commitment_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/commitments" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -858,30 +919,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.CommitmentList( id="id_value", - items=[ - compute.Commitment( - category=compute.Commitment.Category.CATEGORY_UNSPECIFIED - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.CommitmentList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.CommitmentList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -889,21 +944,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Commitment(category=compute.Commitment.Category.CATEGORY_UNSPECIFIED) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionCommitmentsRequest +): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -911,30 +987,36 @@ def test_list_rest_flattened(): return_value = compute.CommitmentList() # Wrap the value into a proper Response obj - json_return_value = compute.CommitmentList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.CommitmentList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/commitments" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -946,11 +1028,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionCommitmentsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.CommitmentList( @@ -980,16 +1064,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Commitment) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1075,8 +1158,10 @@ def test_region_commitments_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_commitments_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1100,29 +1185,6 @@ def test_region_commitments_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_commitments_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_commitments.transports.RegionCommitmentsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionCommitmentsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_commitments_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1134,7 +1196,6 @@ def test_region_commitments_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_commitments_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1150,21 +1211,6 @@ def test_region_commitments_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_commitments_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionCommitmentsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_commitments_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1311,3 +1357,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionCommitmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_disk_types.py b/tests/unit/gapic/compute_v1/test_region_disk_types.py index eb9da29f9..fb20dba2e 100644 --- a/tests/unit/gapic/compute_v1/test_region_disk_types.py +++ b/tests/unit/gapic/compute_v1/test_region_disk_types.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_disk_types import RegionDiskTypesClient from google.cloud.compute_v1.services.region_disk_types import pagers from google.cloud.compute_v1.services.region_disk_types import transports -from google.cloud.compute_v1.services.region_disk_types.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_region_disk_types_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_region_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_region_disk_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_region_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_region_disk_types_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_region_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_region_disk_types_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_region_disk_types_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -306,7 +294,7 @@ def test_region_disk_types_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -323,6 +311,7 
@@ def test_region_disk_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -347,7 +336,7 @@ def test_region_disk_types_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -356,6 +345,7 @@ def test_region_disk_types_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -368,7 +358,7 @@ def test_region_disk_types_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +367,7 @@ def test_region_disk_types_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +382,7 @@ def test_region_disk_types_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -400,6 +391,7 @@ def test_region_disk_types_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -414,7 +406,7 @@ def test_region_disk_types_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -423,6 +415,7 @@ def test_region_disk_types_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -433,9 +426,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk_type": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -443,7 +436,6 @@ def test_get_rest( return_value = compute.DiskType( creation_timestamp="creation_timestamp_value", default_disk_size_gb=2097, - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", id=205, kind="kind_value", @@ -455,9 +447,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.DiskType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskType.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -466,7 +458,6 @@ def test_get_rest( assert isinstance(response, compute.DiskType) assert response.creation_timestamp == "creation_timestamp_value" assert response.default_disk_size_gb == 2097 - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.id == 205 assert response.kind == "kind_value" @@ -477,12 +468,37 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionDiskTypeRequest +): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk_type": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RegionDiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -490,31 +506,42 @@ def test_get_rest_flattened(): return_value = compute.DiskType() # Wrap the value into a proper Response obj - json_return_value = compute.DiskType.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskType.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "disk_type": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", disk_type="disk_type_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
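+        # mock_args was updated with sample_request above, so the transcoded URL can be validated against the diskTypes path template below.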
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "disk_type_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/diskTypes/{disk_type}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RegionDiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -534,26 +561,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.RegionDiskTypeList( id="id_value", - items=[compute.DiskType(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RegionDiskTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionDiskTypeList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -561,21 +586,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.DiskType(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionDiskTypesRequest +): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
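+    # A 400 status on the mocked response should surface as core_exceptions.BadRequest.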
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RegionDiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -583,30 +629,36 @@ def test_list_rest_flattened(): return_value = compute.RegionDiskTypeList() # Wrap the value into a proper Response obj - json_return_value = compute.RegionDiskTypeList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionDiskTypeList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/diskTypes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RegionDiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -618,11 +670,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionDiskTypesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionDiskTypeList( @@ -648,16 +702,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.DiskType) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -739,8 +792,10 @@ def test_region_disk_types_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_disk_types_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -765,30 +820,6 @@ def test_region_disk_types_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_disk_types_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_disk_types.transports.RegionDiskTypesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionDiskTypesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_disk_types_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -800,7 +831,6 @@ def test_region_disk_types_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_disk_types_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -817,22 +847,6 @@ def test_region_disk_types_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_disk_types_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionDiskTypesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_disk_types_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -979,3 +993,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionDiskTypesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_disks.py b/tests/unit/gapic/compute_v1/test_region_disks.py index cde064f89..291a05bdf 100644 --- a/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/tests/unit/gapic/compute_v1/test_region_disks.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_disks import RegionDisksClient from google.cloud.compute_v1.services.region_disks import pagers from google.cloud.compute_v1.services.region_disks import transports -from google.cloud.compute_v1.services.region_disks.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_region_disks_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_region_disks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_region_disks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_region_disks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_region_disks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_region_disks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_region_disks_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_region_disks_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_region_disks_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def 
test_region_disks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_region_disks_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_region_disks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_region_disks_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_region_disks_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_region_disks_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_region_disks_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_region_disks_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_region_disks_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,9 +408,14 @@ def test_add_resource_policies_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init[ + "region_disks_add_resource_policies_request_resource" + ] = compute.RegionDisksAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -427,7 +425,6 @@ def test_add_resource_policies_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -445,14 +442,13 @@ def test_add_resource_policies_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_resource_policies(request) @@ -463,7 +459,6 @@ def test_add_resource_policies_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -481,18 +476,45 @@ def test_add_resource_policies_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_resource_policies_rest_bad_request( + transport: str = "rest", request_type=compute.AddResourcePoliciesRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init[ + "region_disks_add_resource_policies_request_resource" + ] = compute.RegionDisksAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_resource_policies(request) + + def test_add_resource_policies_rest_from_dict(): test_add_resource_policies_rest(request_type=dict) -def test_add_resource_policies_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_resource_policies_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -500,42 +522,43 @@ def test_add_resource_policies_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_disks_add_resource_policies_request_resource = compute.RegionDisksAddResourcePoliciesRequest( - resource_policies=["resource_policies_value"] - ) - client.add_resource_policies( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", disk="disk_value", - region_disks_add_resource_policies_request_resource=region_disks_add_resource_policies_request_resource, + region_disks_add_resource_policies_request_resource=compute.RegionDisksAddResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ), ) + mock_args.update(sample_request) + client.add_resource_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.RegionDisksAddResourcePoliciesRequest.to_json( - region_disks_add_resource_policies_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_resource_policies_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/addResourcePolicies" + % client.transport._host, + args[1], + ) + + +def test_add_resource_policies_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -558,9 +581,10 @@ def test_create_snapshot_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init["snapshot_resource"] = compute.Snapshot(auto_created=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -570,7 +594,6 @@ def test_create_snapshot_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -588,14 +611,13 @@ def test_create_snapshot_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_snapshot(request) @@ -606,7 +628,6 @@ def test_create_snapshot_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -624,18 +645,41 @@ def test_create_snapshot_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_create_snapshot_rest_bad_request( + transport: str = "rest", request_type=compute.CreateSnapshotRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init["snapshot_resource"] = compute.Snapshot(auto_created=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_snapshot(request) + + def test_create_snapshot_rest_from_dict(): test_create_snapshot_rest(request_type=dict) -def test_create_snapshot_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_create_snapshot_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -643,40 +687,41 @@ def test_create_snapshot_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - snapshot_resource = compute.Snapshot(auto_created=True) - client.create_snapshot( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", disk="disk_value", - snapshot_resource=snapshot_resource, + snapshot_resource=compute.Snapshot(auto_created=True), ) + mock_args.update(sample_request) + client.create_snapshot(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.Snapshot.to_json( - snapshot_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_create_snapshot_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/createSnapshot" + % client.transport._host, + args[1], + ) + + +def test_create_snapshot_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -697,9 +742,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -709,7 +754,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -727,14 +771,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -745,7 +788,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -763,18 +805,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -782,31 +846,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", disk="disk_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -824,9 +895,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionDiskReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -834,18 +905,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionDiskReq return_value = compute.Disk( creation_timestamp="creation_timestamp_value", description="description_value", - disk_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), - guest_os_features=[ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ], id=205, kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, last_attach_timestamp="last_attach_timestamp_value", last_detach_timestamp="last_detach_timestamp_value", license_codes=[1360], @@ -864,14 +926,8 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionDiskReq source_disk="source_disk_value", source_disk_id="source_disk_id_value", source_image="source_image_value", - source_image_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_image_id="source_image_id_value", source_snapshot="source_snapshot_value", - source_snapshot_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_snapshot_id="source_snapshot_id_value", source_storage_object="source_storage_object_value", status=compute.Disk.Status.CREATING, @@ -881,9 +937,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionDiskReq ) # Wrap the value into a proper Response obj - json_return_value = compute.Disk.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Disk.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -892,18 +948,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionDiskReq assert isinstance(response, compute.Disk) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.disk_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) - assert response.guest_os_features == [ - compute.GuestOsFeature( - type_=compute.GuestOsFeature.Type.FEATURE_TYPE_UNSPECIFIED - ) - ] assert response.id == 205 assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.last_attach_timestamp == "last_attach_timestamp_value" assert response.last_detach_timestamp == "last_detach_timestamp_value" assert response.license_codes == [1360] @@ -922,14 +969,8 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionDiskReq assert response.source_disk == "source_disk_value" assert response.source_disk_id == "source_disk_id_value" assert response.source_image == "source_image_value" - assert response.source_image_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_image_id == "source_image_id_value" assert response.source_snapshot == "source_snapshot_value" - assert response.source_snapshot_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_snapshot_id == "source_snapshot_id_value" assert response.source_storage_object == "source_storage_object_value" assert response.status == compute.Disk.Status.CREATING @@ -938,12 +979,37 @@ def test_get_rest(transport: str = "rest", 
request_type=compute.GetRegionDiskReq assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -951,31 +1017,38 @@ def test_get_rest_flattened(): return_value = compute.Disk() # Wrap the value into a proper Response obj - json_return_value = compute.Disk.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Disk.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", disk="disk_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -995,60 +1068,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1056,31 +1130,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1100,9 +1185,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["disk_resource"] = compute.Disk( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1112,7 +1200,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1130,14 +1217,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1148,7 +1234,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1166,18 +1251,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["disk_resource"] = compute.Disk( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1185,36 +1295,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- disk_resource = compute.Disk(creation_timestamp="creation_timestamp_value") - client.insert( - project="project_value", region="region_value", disk_resource=disk_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Disk.to_json( - disk_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1234,26 +1348,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.DiskList( id="id_value", - items=[compute.Disk(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.DiskList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1261,21 +1373,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Disk(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionDisksRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1283,30 +1416,36 @@ def test_list_rest_flattened(): return_value = compute.DiskList() # Wrap the value into a proper Response obj - json_return_value = compute.DiskList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DiskList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
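
The assertions introduced throughout this patch use google.api_core.path_template.validate() to confirm that the mocked HTTP call was made against the URI produced by transcoding the request. For reference, a minimal standalone sketch of that check, using the same template/URI argument order as the tests above (the host and the "sample1"/"sample2" path values are illustrative only):

from google.api_core import path_template

# validate(template, path) returns True when the concrete URI matches the
# template; each {placeholder} matches exactly one path segment.
template = (
    "https://compute.googleapis.com/compute/v1"
    "/projects/{project}/regions/{region}/disks"
)
uri = (
    "https://compute.googleapis.com/compute/v1"
    "/projects/sample1/regions/sample2/disks"
)
assert path_template.validate(template, uri)
# A trailing segment that the template does not declare fails the check.
assert not path_template.validate(template, uri + "/extra")
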
@@ -1318,11 +1457,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.DiskList( @@ -1344,16 +1485,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Disk) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1366,9 +1506,14 @@ def test_remove_resource_policies_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init[ + "region_disks_remove_resource_policies_request_resource" + ] = compute.RegionDisksRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1378,7 +1523,6 @@ def test_remove_resource_policies_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1396,14 +1540,13 @@ def test_remove_resource_policies_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_resource_policies(request) @@ -1414,7 +1557,6 @@ def test_remove_resource_policies_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1432,18 +1574,46 @@ def test_remove_resource_policies_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_resource_policies_rest_bad_request( + transport: str = "rest", + request_type=compute.RemoveResourcePoliciesRegionDiskRequest, +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init[ + "region_disks_remove_resource_policies_request_resource" + ] = compute.RegionDisksRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_resource_policies(request) + + def test_remove_resource_policies_rest_from_dict(): test_remove_resource_policies_rest(request_type=dict) -def test_remove_resource_policies_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_resource_policies_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1451,42 +1621,43 @@ def test_remove_resource_policies_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_disks_remove_resource_policies_request_resource = compute.RegionDisksRemoveResourcePoliciesRequest( - resource_policies=["resource_policies_value"] - ) - client.remove_resource_policies( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", disk="disk_value", - region_disks_remove_resource_policies_request_resource=region_disks_remove_resource_policies_request_resource, + region_disks_remove_resource_policies_request_resource=compute.RegionDisksRemoveResourcePoliciesRequest( + resource_policies=["resource_policies_value"] + ), ) + mock_args.update(sample_request) + client.remove_resource_policies(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.RegionDisksRemoveResourcePoliciesRequest.to_json( - region_disks_remove_resource_policies_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_remove_resource_policies_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/removeResourcePolicies" + % client.transport._host, + args[1], + ) + + +def test_remove_resource_policies_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1509,9 +1680,12 @@ def test_resize_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init[ + "region_disks_resize_request_resource" + ] = compute.RegionDisksResizeRequest(size_gb=739) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1521,7 +1695,6 @@ def test_resize_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1539,14 +1712,13 @@ def test_resize_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.resize(request) @@ -1557,7 +1729,6 @@ def test_resize_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1575,18 +1746,43 @@ def test_resize_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_resize_rest_bad_request( + transport: str = "rest", request_type=compute.ResizeRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init[ + "region_disks_resize_request_resource" + ] = compute.RegionDisksResizeRequest(size_gb=739) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + def test_resize_rest_from_dict(): test_resize_rest(request_type=dict) -def test_resize_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_resize_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1594,42 +1790,43 @@ def test_resize_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- region_disks_resize_request_resource = compute.RegionDisksResizeRequest( - size_gb=739 - ) - client.resize( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", disk="disk_value", - region_disks_resize_request_resource=region_disks_resize_request_resource, + region_disks_resize_request_resource=compute.RegionDisksResizeRequest( + size_gb=739 + ), ) + mock_args.update(sample_request) + client.resize(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "disk_value" in http_call[1] + str(body) + str(params) - assert compute.RegionDisksResizeRequest.to_json( - region_disks_resize_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_resize_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/resize" + % client.transport._host, + args[1], + ) + + +def test_resize_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1652,60 +1849,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1713,42 +1917,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_set_policy_request_resource = compute.RegionSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_policy_request_resource=region_set_policy_request_resource, + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetPolicyRequest.to_json( - region_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1771,9 +1980,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_labels_request_resource"] = compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1783,7 +1995,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1801,14 +2012,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1819,7 +2029,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1837,18 +2046,43 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_labels_request_resource"] = compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_labels_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1856,42 +2090,47 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_set_labels_request_resource = compute.RegionSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_labels_request_resource=region_set_labels_request_resource, + region_set_labels_request_resource=compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetLabelsRequest.to_json( - region_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1914,9 +2153,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1926,9 +2168,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1938,12 +2180,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsRegionDiskRequest +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1951,42 +2221,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = RegionDisksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/disks/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2089,8 +2364,10 @@ def test_region_disks_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_disks_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2114,29 +2391,6 @@ def test_region_disks_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_disks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_disks.transports.RegionDisksTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionDisksTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_disks_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2148,7 +2402,6 @@ def test_region_disks_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_disks_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2164,21 +2417,6 @@ def test_region_disks_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_disks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionDisksClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_disks_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2325,3 +2563,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_health_check_services.py b/tests/unit/gapic/compute_v1/test_region_health_check_services.py index 4b9f049af..9591e0778 100644 --- a/tests/unit/gapic/compute_v1/test_region_health_check_services.py +++ b/tests/unit/gapic/compute_v1/test_region_health_check_services.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_health_check_services import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_health_check_services import pagers from google.cloud.compute_v1.services.region_health_check_services import transports -from google.cloud.compute_v1.services.region_health_check_services.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +194,7 @@ def test_region_health_check_services_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +203,7 @@ def test_region_health_check_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +211,7 @@ def test_region_health_check_services_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +220,7 @@ def test_region_health_check_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -242,7 +228,7 @@ def test_region_health_check_services_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +237,7 @@ def test_region_health_check_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -270,7 +257,7 @@ def test_region_health_check_services_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -279,6 +266,7 @@ def test_region_health_check_services_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -321,7 +309,7 @@ def test_region_health_check_services_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, 
client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -338,6 +326,7 @@ def test_region_health_check_services_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -362,7 +351,7 @@ def test_region_health_check_services_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -371,6 +360,7 @@ def test_region_health_check_services_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -383,7 +373,7 @@ def test_region_health_check_services_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -392,6 +382,7 @@ def test_region_health_check_services_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -412,7 +403,7 @@ def test_region_health_check_services_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,6 +412,7 @@ def test_region_health_check_services_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -441,7 +433,7 @@ def test_region_health_check_services_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -450,6 +442,7 @@ def test_region_health_check_services_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -460,9 +453,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -472,7 +469,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -490,14 +486,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -508,7 +503,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -526,19 +520,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionHealthCheckServiceRequest +): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -547,34 +565,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", health_check_service="health_check_service_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "health_check_service_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -595,9 +622,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -618,9 +649,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckService.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckService.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -644,13 +675,40 @@ def test_get_rest( assert response.self_link == "self_link_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionHealthCheckServiceRequest +): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -659,34 +717,43 @@ def test_get_rest_flattened(): return_value = compute.HealthCheckService() # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckService.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckService.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", health_check_service="health_check_service_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "health_check_service_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -707,9 +774,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["health_check_service_resource"] = compute.HealthCheckService( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -719,7 +789,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -737,14 +806,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -755,7 +823,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -773,19 +840,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionHealthCheckServiceRequest +): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["health_check_service_resource"] = compute.HealthCheckService( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -794,41 +884,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- health_check_service_resource = compute.HealthCheckService( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - health_check_service_resource=health_check_service_resource, + health_check_service_resource=compute.HealthCheckService( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheckService.to_json( - health_check_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -851,30 +941,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.HealthCheckServicesList( id="id_value", - items=[ - compute.HealthCheckService( - creation_timestamp="creation_timestamp_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckServicesList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckServicesList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -882,22 +966,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.HealthCheckService(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionHealthCheckServicesRequest +): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -906,31 +1009,35 @@ def test_list_rest_flattened(): return_value = compute.HealthCheckServicesList() # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckServicesList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckServicesList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -943,13 +1050,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionHealthCheckServicesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.HealthCheckServicesList( @@ -979,16 +1088,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.HealthCheckService) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1000,9 +1108,16 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + request_init["health_check_service_resource"] = compute.HealthCheckService( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1012,7 +1127,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1030,14 +1144,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1048,7 +1161,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1066,19 +1178,46 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionHealthCheckServiceRequest +): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + request_init["health_check_service_resource"] = compute.HealthCheckService( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1087,43 +1226,46 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- health_check_service_resource = compute.HealthCheckService( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_check_service": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", health_check_service="health_check_service_value", - health_check_service_resource=health_check_service_resource, + health_check_service_resource=compute.HealthCheckService( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "health_check_service_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheckService.to_json( - health_check_service_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthCheckServices/{health_check_service}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = RegionHealthCheckServicesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1222,8 +1364,10 @@ def test_region_health_check_services_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_health_check_services_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1247,29 +1391,6 @@ def test_region_health_check_services_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_health_check_services_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_health_check_services.transports.RegionHealthCheckServicesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionHealthCheckServicesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_health_check_services_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1281,7 +1402,6 @@ def test_region_health_check_services_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_health_check_services_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1297,21 +1417,6 @@ def test_region_health_check_services_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_health_check_services_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionHealthCheckServicesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_health_check_services_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1460,3 +1565,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionHealthCheckServicesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_health_checks.py b/tests/unit/gapic/compute_v1/test_region_health_checks.py index 61b02d822..21dce6455 100644 --- a/tests/unit/gapic/compute_v1/test_region_health_checks.py +++ b/tests/unit/gapic/compute_v1/test_region_health_checks.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_health_checks import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_health_checks import pagers from google.cloud.compute_v1.services.region_health_checks import transports -from google.cloud.compute_v1.services.region_health_checks.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -198,7 +182,7 @@ def test_region_health_checks_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -207,6 +191,7 @@ def test_region_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,7 +199,7 @@ def test_region_health_checks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -223,6 +208,7 @@ def test_region_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, 
) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -230,7 +216,7 @@ def test_region_health_checks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -239,6 +225,7 @@ def test_region_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -258,7 +245,7 @@ def test_region_health_checks_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -267,6 +254,7 @@ def test_region_health_checks_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -309,7 +297,7 @@ def test_region_health_checks_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -326,6 +314,7 @@ def test_region_health_checks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -350,7 +339,7 @@ def test_region_health_checks_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +348,7 @@ def test_region_health_checks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -371,7 +361,7 @@ def test_region_health_checks_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -380,6 +370,7 @@ def test_region_health_checks_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -394,7 +385,7 @@ def test_region_health_checks_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -403,6 +394,7 @@ def test_region_health_checks_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -417,7 +409,7 @@ def test_region_health_checks_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -426,6 +418,7 @@ def test_region_health_checks_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -436,9 +429,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -448,7 +445,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -466,14 +462,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -484,7 +479,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -502,19 +496,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionHealthCheckRequest +): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -523,34 +541,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", health_check="health_check_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -571,9 +598,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -582,30 +613,21 @@ def test_get_rest( check_interval_sec=1884, creation_timestamp="creation_timestamp_value", description="description_value", - grpc_health_check=compute.GRPCHealthCheck( - grpc_service_name="grpc_service_name_value" - ), healthy_threshold=1819, - http2_health_check=compute.HTTP2HealthCheck(host="host_value"), - http_health_check=compute.HTTPHealthCheck(host="host_value"), - https_health_check=compute.HTTPSHealthCheck(host="host_value"), id=205, kind="kind_value", - log_config=compute.HealthCheckLogConfig(enable=True), name="name_value", region="region_value", self_link="self_link_value", - ssl_health_check=compute.SSLHealthCheck(port=453), - tcp_health_check=compute.TCPHealthCheck(port=453), timeout_sec=1185, type_=compute.HealthCheck.Type.GRPC, unhealthy_threshold=2046, ) # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheck.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheck.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -615,33 +637,51 @@ def test_get_rest( assert response.check_interval_sec == 1884 assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.grpc_health_check == compute.GRPCHealthCheck( - grpc_service_name="grpc_service_name_value" - ) assert response.healthy_threshold == 1819 - assert response.http2_health_check == compute.HTTP2HealthCheck(host="host_value") - assert response.http_health_check == compute.HTTPHealthCheck(host="host_value") - assert response.https_health_check == compute.HTTPSHealthCheck(host="host_value") assert response.id == 205 assert response.kind == "kind_value" - assert response.log_config == compute.HealthCheckLogConfig(enable=True) assert response.name == "name_value" assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.ssl_health_check == compute.SSLHealthCheck(port=453) - assert response.tcp_health_check == compute.TCPHealthCheck(port=453) assert response.timeout_sec == 1185 assert response.type_ == compute.HealthCheck.Type.GRPC assert response.unhealthy_threshold == 2046 +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionHealthCheckRequest +): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -650,34 +690,43 @@ def test_get_rest_flattened(): return_value = compute.HealthCheck() # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheck.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheck.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", health_check="health_check_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -698,9 +747,10 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -710,7 +760,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -728,14 +777,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -746,7 +794,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -764,19 +811,40 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionHealthCheckRequest +): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -785,39 +853,39 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- health_check_resource = compute.HealthCheck(check_interval_sec=1884) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - health_check_resource=health_check_resource, + health_check_resource=compute.HealthCheck(check_interval_sec=1884), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheck.to_json( - health_check_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthChecks" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -838,26 +906,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.HealthCheckList( id="id_value", - items=[compute.HealthCheck(check_interval_sec=1884)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -865,20 +931,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.HealthCheck(check_interval_sec=1884)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionHealthChecksRequest +): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -887,31 +974,35 @@ def test_list_rest_flattened(): return_value = compute.HealthCheckList() # Wrap the value into a proper Response obj - json_return_value = compute.HealthCheckList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.HealthCheckList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthChecks" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -924,13 +1015,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionHealthChecksClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.HealthCheckList( @@ -960,16 +1053,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.HealthCheck) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -981,9 +1073,14 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -993,7 +1090,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1011,14 +1107,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1029,7 +1124,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1047,19 +1141,44 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionHealthCheckRequest +): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1068,41 +1187,44 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- health_check_resource = compute.HealthCheck(check_interval_sec=1884) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", health_check="health_check_value", - health_check_resource=health_check_resource, + health_check_resource=compute.HealthCheck(check_interval_sec=1884), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheck.to_json( - health_check_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1124,9 +1246,14 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1136,7 +1263,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1154,14 +1280,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1172,7 +1297,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1190,19 +1314,44 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateRegionHealthCheckRequest +): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + request_init["health_check_resource"] = compute.HealthCheck(check_interval_sec=1884) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): +def test_update_rest_flattened(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1211,41 +1360,44 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- health_check_resource = compute.HealthCheck(check_interval_sec=1884) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_check": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", health_check="health_check_value", - health_check_resource=health_check_resource, + health_check_resource=compute.HealthCheck(check_interval_sec=1884), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "health_check_value" in http_call[1] + str(body) + str(params) - assert compute.HealthCheck.to_json( - health_check_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/healthChecks/{health_check}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): client = RegionHealthChecksClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1343,8 +1495,10 @@ def test_region_health_checks_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_health_checks_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1368,29 +1522,6 @@ def test_region_health_checks_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_health_checks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_health_checks.transports.RegionHealthChecksTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionHealthChecksTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_health_checks_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1402,7 +1533,6 @@ def test_region_health_checks_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_health_checks_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1418,21 +1548,6 @@ def test_region_health_checks_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_health_checks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionHealthChecksClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_health_checks_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1579,3 +1694,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py index 6acebbaaa..1a0b89640 100644 --- a/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py +++ b/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_instance_group_managers import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_instance_group_managers import pagers from google.cloud.compute_v1.services.region_instance_group_managers import transports -from google.cloud.compute_v1.services.region_instance_group_managers.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +194,7 @@ def test_region_instance_group_managers_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +203,7 @@ def test_region_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +211,7 @@ def test_region_instance_group_managers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +220,7 @@ def test_region_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -242,7 +228,7 @@ def test_region_instance_group_managers_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +237,7 @@ def test_region_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -270,7 +257,7 @@ def test_region_instance_group_managers_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -279,6 +266,7 @@ def test_region_instance_group_managers_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -321,7 +309,7 @@ def test_region_instance_group_managers_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = 
client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -338,6 +326,7 @@ def test_region_instance_group_managers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -362,7 +351,7 @@ def test_region_instance_group_managers_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -371,6 +360,7 @@ def test_region_instance_group_managers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -383,7 +373,7 @@ def test_region_instance_group_managers_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -392,6 +382,7 @@ def test_region_instance_group_managers_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -412,7 +403,7 @@ def test_region_instance_group_managers_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,6 +412,7 @@ def test_region_instance_group_managers_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -441,7 +433,7 @@ def test_region_instance_group_managers_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -450,6 +442,7 @@ def test_region_instance_group_managers_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -461,9 +454,18 @@ def test_abandon_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ] = compute.RegionInstanceGroupManagersAbandonInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -473,7 +475,6 @@ def test_abandon_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -491,14 +492,13 @@ def test_abandon_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.abandon_instances(request) @@ -509,7 +509,6 @@ def test_abandon_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -527,19 +526,49 @@ def test_abandon_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_abandon_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.AbandonInstancesRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_abandon_instances_request_resource" + ] = compute.RegionInstanceGroupManagersAbandonInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.abandon_instances(request) + + def test_abandon_instances_rest_from_dict(): test_abandon_instances_rest(request_type=dict) -def test_abandon_instances_rest_flattened(): +def test_abandon_instances_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -548,43 +577,46 @@ def test_abandon_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_managers_abandon_instances_request_resource = compute.RegionInstanceGroupManagersAbandonInstancesRequest( - instances=["instances_value"] - ) - client.abandon_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_managers_abandon_instances_request_resource=region_instance_group_managers_abandon_instances_request_resource, + region_instance_group_managers_abandon_instances_request_resource=compute.RegionInstanceGroupManagersAbandonInstancesRequest( + instances=["instances_value"] + ), ) + mock_args.update(sample_request) + client.abandon_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagersAbandonInstancesRequest.to_json( - region_instance_group_managers_abandon_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/abandonInstances" + % client.transport._host, + args[1], + ) -def test_abandon_instances_rest_flattened_error(): +def test_abandon_instances_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -609,9 +641,16 @@ def test_apply_updates_to_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ] = compute.RegionInstanceGroupManagersApplyUpdatesRequest(all_instances=True) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -621,7 +660,6 @@ def test_apply_updates_to_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -639,14 +677,13 @@ def test_apply_updates_to_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.apply_updates_to_instances(request) @@ -657,7 +694,6 @@ def test_apply_updates_to_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -675,19 +711,47 @@ def test_apply_updates_to_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_apply_updates_to_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_apply_updates_request_resource" + ] = compute.RegionInstanceGroupManagersApplyUpdatesRequest(all_instances=True) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.apply_updates_to_instances(request) + + def test_apply_updates_to_instances_rest_from_dict(): test_apply_updates_to_instances_rest(request_type=dict) -def test_apply_updates_to_instances_rest_flattened(): +def test_apply_updates_to_instances_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -696,43 +760,46 @@ def test_apply_updates_to_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_managers_apply_updates_request_resource = compute.RegionInstanceGroupManagersApplyUpdatesRequest( - all_instances=True - ) - client.apply_updates_to_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_managers_apply_updates_request_resource=region_instance_group_managers_apply_updates_request_resource, + region_instance_group_managers_apply_updates_request_resource=compute.RegionInstanceGroupManagersApplyUpdatesRequest( + all_instances=True + ), ) + mock_args.update(sample_request) + client.apply_updates_to_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagersApplyUpdatesRequest.to_json( - region_instance_group_managers_apply_updates_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/applyUpdatesToInstances" + % client.transport._host, + args[1], + ) -def test_apply_updates_to_instances_rest_flattened_error(): +def test_apply_updates_to_instances_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -757,9 +824,18 @@ def test_create_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_create_instances_request_resource" + ] = compute.RegionInstanceGroupManagersCreateInstancesRequest( + instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -769,7 +845,6 @@ def test_create_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -787,14 +862,13 @@ def test_create_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.create_instances(request) @@ -805,7 +879,6 @@ def test_create_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -823,19 +896,49 @@ def test_create_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_create_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.CreateInstancesRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_create_instances_request_resource" + ] = compute.RegionInstanceGroupManagersCreateInstancesRequest( + instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.create_instances(request) + + def test_create_instances_rest_from_dict(): test_create_instances_rest(request_type=dict) -def test_create_instances_rest_flattened(): +def test_create_instances_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -844,43 +947,46 @@ def test_create_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_managers_create_instances_request_resource = compute.RegionInstanceGroupManagersCreateInstancesRequest( - instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] - ) - client.create_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_managers_create_instances_request_resource=region_instance_group_managers_create_instances_request_resource, + region_instance_group_managers_create_instances_request_resource=compute.RegionInstanceGroupManagersCreateInstancesRequest( + instances=[compute.PerInstanceConfig(fingerprint="fingerprint_value")] + ), ) + mock_args.update(sample_request) + client.create_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagersCreateInstancesRequest.to_json( - region_instance_group_managers_create_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/createInstances" + % client.transport._host, + args[1], + ) -def test_create_instances_rest_flattened_error(): +def test_create_instances_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -905,9 +1011,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -917,7 +1027,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -935,14 +1044,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -953,7 +1061,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -971,19 +1078,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -992,34 +1124,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1041,9 +1182,18 @@ def test_delete_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ] = compute.RegionInstanceGroupManagersDeleteInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1053,7 +1203,6 @@ def test_delete_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1071,14 +1220,13 @@ def test_delete_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_instances(request) @@ -1089,7 +1237,6 @@ def test_delete_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1107,19 +1254,49 @@ def test_delete_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteInstancesRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_delete_instances_request_resource" + ] = compute.RegionInstanceGroupManagersDeleteInstancesRequest( + instances=["instances_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_instances(request) + + def test_delete_instances_rest_from_dict(): test_delete_instances_rest(request_type=dict) -def test_delete_instances_rest_flattened(): +def test_delete_instances_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1128,43 +1305,46 @@ def test_delete_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_managers_delete_instances_request_resource = compute.RegionInstanceGroupManagersDeleteInstancesRequest( - instances=["instances_value"] - ) - client.delete_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_managers_delete_instances_request_resource=region_instance_group_managers_delete_instances_request_resource, + region_instance_group_managers_delete_instances_request_resource=compute.RegionInstanceGroupManagersDeleteInstancesRequest( + instances=["instances_value"] + ), ) + mock_args.update(sample_request) + client.delete_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagersDeleteInstancesRequest.to_json( - region_instance_group_managers_delete_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deleteInstances" + % client.transport._host, + args[1], + ) -def test_delete_instances_rest_flattened_error(): +def test_delete_instances_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1189,9 +1369,16 @@ def test_delete_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ] = compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(names=["names_value"]) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1201,7 +1388,6 @@ def test_delete_per_instance_configs_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1219,14 +1405,13 @@ def test_delete_per_instance_configs_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete_per_instance_configs(request) @@ -1237,7 +1422,6 @@ def test_delete_per_instance_configs_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1255,19 +1439,47 @@ def test_delete_per_instance_configs_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_manager_delete_instance_config_req_resource" + ] = compute.RegionInstanceGroupManagerDeleteInstanceConfigReq(names=["names_value"]) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete_per_instance_configs(request) + + def test_delete_per_instance_configs_rest_from_dict(): test_delete_per_instance_configs_rest(request_type=dict) -def test_delete_per_instance_configs_rest_flattened(): +def test_delete_per_instance_configs_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1276,43 +1488,46 @@ def test_delete_per_instance_configs_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_manager_delete_instance_config_req_resource = compute.RegionInstanceGroupManagerDeleteInstanceConfigReq( - names=["names_value"] - ) - client.delete_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_manager_delete_instance_config_req_resource=region_instance_group_manager_delete_instance_config_req_resource, + region_instance_group_manager_delete_instance_config_req_resource=compute.RegionInstanceGroupManagerDeleteInstanceConfigReq( + names=["names_value"] + ), ) + mock_args.update(sample_request) + client.delete_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagerDeleteInstanceConfigReq.to_json( - region_instance_group_manager_delete_instance_config_req_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/deletePerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_delete_per_instance_configs_rest_flattened_error(): +def test_delete_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1336,119 +1551,94 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ], base_instance_name="base_instance_name_value", creation_timestamp="creation_timestamp_value", - current_actions=compute.InstanceGroupManagerActionsSummary(abandoning=1041), description="description_value", - distribution_policy=compute.DistributionPolicy( - target_shape=compute.DistributionPolicy.TargetShape.ANY - ), fingerprint="fingerprint_value", id=205, instance_group="instance_group_value", instance_template="instance_template_value", kind="kind_value", name="name_value", - named_ports=[compute.NamedPort(name="name_value")], region="region_value", self_link="self_link_value", - stateful_policy=compute.StatefulPolicy( - preserved_state=compute.StatefulPolicyPreservedState( - disks={ - "key_value": compute.StatefulPolicyPreservedStateDiskDevice( - auto_delete=compute.StatefulPolicyPreservedStateDiskDevice.AutoDelete.NEVER - ) - } - ) - ), - status=compute.InstanceGroupManagerStatus(autoscaler="autoscaler_value"), target_pools=["target_pools_value"], target_size=1185, - update_policy=compute.InstanceGroupManagerUpdatePolicy( - instance_redistribution_type="instance_redistribution_type_value" - ), - versions=[ - compute.InstanceGroupManagerVersion( - instance_template="instance_template_value" - ) - ], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupManager.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupManager.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.InstanceGroupManager) - assert response.auto_healing_policies == [ - compute.InstanceGroupManagerAutoHealingPolicy(health_check="health_check_value") - ] assert response.base_instance_name == "base_instance_name_value" assert response.creation_timestamp == "creation_timestamp_value" - assert response.current_actions == compute.InstanceGroupManagerActionsSummary( - abandoning=1041 - ) assert response.description == "description_value" - assert response.distribution_policy == compute.DistributionPolicy( - target_shape=compute.DistributionPolicy.TargetShape.ANY - ) assert response.fingerprint == "fingerprint_value" assert response.id == 205 assert response.instance_group == "instance_group_value" assert response.instance_template == "instance_template_value" assert response.kind == "kind_value" assert response.name == "name_value" - assert response.named_ports == [compute.NamedPort(name="name_value")] assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.stateful_policy == compute.StatefulPolicy( - preserved_state=compute.StatefulPolicyPreservedState( - disks={ - "key_value": compute.StatefulPolicyPreservedStateDiskDevice( - auto_delete=compute.StatefulPolicyPreservedStateDiskDevice.AutoDelete.NEVER - ) - } - ) - ) - assert response.status == compute.InstanceGroupManagerStatus( - autoscaler="autoscaler_value" - ) assert response.target_pools == ["target_pools_value"] assert response.target_size == 1185 - assert response.update_policy == compute.InstanceGroupManagerUpdatePolicy( - instance_redistribution_type="instance_redistribution_type_value" - ) - assert response.versions == [ - compute.InstanceGroupManagerVersion(instance_template="instance_template_value") - ] assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionInstanceGroupManagerRequest +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1457,34 +1647,43 @@ def test_get_rest_flattened(): return_value = compute.InstanceGroupManager() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroupManager.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroupManager.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1506,9 +1705,16 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1518,7 +1724,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1536,14 +1741,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1554,7 +1758,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1572,19 +1775,47 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" -def test_insert_rest_from_dict(): - test_insert_rest(request_type=dict) - - -def test_insert_rest_flattened(): +def test_insert_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertRegionInstanceGroupManagerRequest, +): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + +def test_insert_rest_from_dict(): + test_insert_rest(request_type=dict) + + +def test_insert_rest_flattened(transport: str = "rest"): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1593,45 +1824,45 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_manager_resource = compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - instance_group_manager_resource=instance_group_manager_resource, + instance_group_manager_resource=compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManager.to_json( - instance_group_manager_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers" + % client.transport._host, + args[1], + ) -def test_insert_rest_flattened_error(): +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1658,34 +1889,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.RegionInstanceGroupManagerList( id="id_value", - items=[ - compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RegionInstanceGroupManagerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionInstanceGroupManagerList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1693,28 +1914,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionInstanceGroupManagersRequest +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1723,31 +1957,35 @@ def test_list_rest_flattened(): return_value = compute.RegionInstanceGroupManagerList() # Wrap the value into a proper Response obj - json_return_value = compute.RegionInstanceGroupManagerList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionInstanceGroupManagerList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1760,13 +1998,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionInstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionInstanceGroupManagerList( @@ -1798,16 +2038,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceGroupManager) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1820,53 +2059,71 @@ def test_list_errors_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.RegionInstanceGroupManagersListErrorsResponse( - items=[ - compute.InstanceManagedByIgmError( - error=compute.InstanceManagedByIgmErrorManagedInstanceError( - code="code_value" - ) - ) - ], next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_errors(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListErrorsPager) - assert response.items == [ - compute.InstanceManagedByIgmError( - error=compute.InstanceManagedByIgmErrorManagedInstanceError( - code="code_value" - ) - ) - ] assert response.next_page_token == "next_page_token_value" +def test_list_errors_rest_bad_request( + transport: str = "rest", + request_type=compute.ListErrorsRegionInstanceGroupManagersRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_errors(request) + + def test_list_errors_rest_from_dict(): test_list_errors_rest(request_type=dict) -def test_list_errors_rest_flattened(): +def test_list_errors_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1875,36 +2132,45 @@ def test_list_errors_rest_flattened(): return_value = compute.RegionInstanceGroupManagersListErrorsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupManagersListErrorsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_errors( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.list_errors(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listErrors" + % client.transport._host, + args[1], + ) -def test_list_errors_rest_flattened_error(): +def test_list_errors_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1918,13 +2184,15 @@ def test_list_errors_rest_flattened_error(): ) -def test_list_errors_pager(): +def test_list_errors_rest_pager(): client = RegionInstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionInstanceGroupManagersListErrorsResponse( @@ -1962,16 +2230,19 @@ def test_list_errors_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_errors(request={}) + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } - assert pager._metadata == metadata + pager = client.list_errors(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceManagedByIgmError) for i in results) - pages = list(client.list_errors(request={}).pages) + pages = list(client.list_errors(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1984,49 +2255,71 @@ def test_list_managed_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.RegionInstanceGroupManagersListInstancesResponse( - managed_instances=[ - compute.ManagedInstance( - current_action=compute.ManagedInstance.CurrentAction.ABANDONING - ) - ], next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_managed_instances(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListManagedInstancesPager) - assert response.managed_instances == [ - compute.ManagedInstance( - current_action=compute.ManagedInstance.CurrentAction.ABANDONING - ) - ] assert response.next_page_token == "next_page_token_value" +def test_list_managed_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.ListManagedInstancesRegionInstanceGroupManagersRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_managed_instances(request) + + def test_list_managed_instances_rest_from_dict(): test_list_managed_instances_rest(request_type=dict) -def test_list_managed_instances_rest_flattened(): +def test_list_managed_instances_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2035,36 +2328,45 @@ def test_list_managed_instances_rest_flattened(): return_value = compute.RegionInstanceGroupManagersListInstancesResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupManagersListInstancesResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_managed_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.list_managed_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listManagedInstances" + % client.transport._host, + args[1], + ) -def test_list_managed_instances_rest_flattened_error(): +def test_list_managed_instances_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2078,13 +2380,15 @@ def test_list_managed_instances_rest_flattened_error(): ) -def test_list_managed_instances_pager(): +def test_list_managed_instances_rest_pager(): client = RegionInstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionInstanceGroupManagersListInstancesResponse( @@ -2122,16 +2426,19 @@ def test_list_managed_instances_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_managed_instances(request={}) + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } - assert pager._metadata == metadata + pager = client.list_managed_instances(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ManagedInstance) for i in results) - pages = list(client.list_managed_instances(request={}).pages) + pages = list(client.list_managed_instances(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2144,45 +2451,71 @@ def test_list_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp( - items=[compute.PerInstanceConfig(fingerprint="fingerprint_value")], next_page_token="next_page_token_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_per_instance_configs(request) # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPerInstanceConfigsPager) - assert response.items == [ - compute.PerInstanceConfig(fingerprint="fingerprint_value") - ] assert response.next_page_token == "next_page_token_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.ListPerInstanceConfigsRegionInstanceGroupManagersRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_per_instance_configs(request) def test_list_per_instance_configs_rest_from_dict(): test_list_per_instance_configs_rest(request_type=dict) -def test_list_per_instance_configs_rest_flattened(): +def test_list_per_instance_configs_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2191,36 +2524,45 @@ def test_list_per_instance_configs_rest_flattened(): return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupManagersListInstanceConfigsResp.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", ) + mock_args.update(sample_request) + client.list_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/listPerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_list_per_instance_configs_rest_flattened_error(): +def test_list_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2234,13 +2576,15 @@ def test_list_per_instance_configs_rest_flattened_error(): ) -def test_list_per_instance_configs_pager(): +def test_list_per_instance_configs_rest_pager(): client = RegionInstanceGroupManagersClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionInstanceGroupManagersListInstanceConfigsResp( @@ -2275,16 +2619,19 @@ def test_list_per_instance_configs_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_per_instance_configs(request={}) + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } - assert pager._metadata == metadata + pager = client.list_per_instance_configs(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.PerInstanceConfig) for i in results) - pages = list(client.list_per_instance_configs(request={}).pages) + pages = list(client.list_per_instance_configs(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -2296,9 +2643,20 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2308,7 +2666,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2326,14 +2683,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -2344,7 +2700,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2362,19 +2717,50 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionInstanceGroupManagerRequest +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init["instance_group_manager_resource"] = compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2383,47 +2769,50 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - instance_group_manager_resource = compute.InstanceGroupManager( - auto_healing_policies=[ - compute.InstanceGroupManagerAutoHealingPolicy( - health_check="health_check_value" - ) - ] - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - instance_group_manager_resource=instance_group_manager_resource, + instance_group_manager_resource=compute.InstanceGroupManager( + auto_healing_policies=[ + compute.InstanceGroupManagerAutoHealingPolicy( + health_check="health_check_value" + ) + ] + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceGroupManager.to_json( - instance_group_manager_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}" + % client.transport._host, + args[1], + ) -def test_patch_rest_flattened_error(): +def test_patch_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2452,9 +2841,20 @@ def test_patch_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ] = compute.RegionInstanceGroupManagerPatchInstanceConfigReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2464,7 +2864,6 @@ def test_patch_per_instance_configs_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2482,14 +2881,13 @@ def test_patch_per_instance_configs_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch_per_instance_configs(request) @@ -2500,7 +2898,6 @@ def test_patch_per_instance_configs_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2518,19 +2915,51 @@ def test_patch_per_instance_configs_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_manager_patch_instance_config_req_resource" + ] = compute.RegionInstanceGroupManagerPatchInstanceConfigReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_per_instance_configs(request) + + def test_patch_per_instance_configs_rest_from_dict(): test_patch_per_instance_configs_rest(request_type=dict) -def test_patch_per_instance_configs_rest_flattened(): +def test_patch_per_instance_configs_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -2539,45 +2968,48 @@ def test_patch_per_instance_configs_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_manager_patch_instance_config_req_resource = compute.RegionInstanceGroupManagerPatchInstanceConfigReq( - per_instance_configs=[ - compute.PerInstanceConfig(fingerprint="fingerprint_value") - ] - ) - client.patch_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_manager_patch_instance_config_req_resource=region_instance_group_manager_patch_instance_config_req_resource, + region_instance_group_manager_patch_instance_config_req_resource=compute.RegionInstanceGroupManagerPatchInstanceConfigReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ), ) + mock_args.update(sample_request) + client.patch_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagerPatchInstanceConfigReq.to_json( - region_instance_group_manager_patch_instance_config_req_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/patchPerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_patch_per_instance_configs_rest_flattened_error(): +def test_patch_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2604,9 +3036,18 @@ def test_recreate_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_recreate_request_resource" + ] = compute.RegionInstanceGroupManagersRecreateRequest( + instances=["instances_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -2616,7 +3057,6 @@ def test_recreate_instances_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2634,14 +3074,13 @@ def test_recreate_instances_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.recreate_instances(request) @@ -2652,7 +3091,6 @@ def test_recreate_instances_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2670,19 +3108,49 @@ def test_recreate_instances_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_recreate_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.RecreateInstancesRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_recreate_request_resource" + ] = compute.RegionInstanceGroupManagersRecreateRequest( + instances=["instances_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.recreate_instances(request) + + def test_recreate_instances_rest_from_dict(): test_recreate_instances_rest(request_type=dict) -def test_recreate_instances_rest_flattened(): +def test_recreate_instances_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2691,43 +3159,46 @@ def test_recreate_instances_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_managers_recreate_request_resource = compute.RegionInstanceGroupManagersRecreateRequest( - instances=["instances_value"] - ) - client.recreate_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_managers_recreate_request_resource=region_instance_group_managers_recreate_request_resource, + region_instance_group_managers_recreate_request_resource=compute.RegionInstanceGroupManagersRecreateRequest( + instances=["instances_value"] + ), ) + mock_args.update(sample_request) + client.recreate_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagersRecreateRequest.to_json( - region_instance_group_managers_recreate_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/recreateInstances" + % client.transport._host, + args[1], + ) -def test_recreate_instances_rest_flattened_error(): +def test_recreate_instances_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2752,9 +3223,13 @@ def test_resize_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2764,7 +3239,6 @@ def test_resize_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2782,14 +3256,13 @@ def test_resize_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.resize(request) @@ -2800,7 +3273,6 @@ def test_resize_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2818,19 +3290,44 @@ def test_resize_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_resize_rest_bad_request( + transport: str = "rest", + request_type=compute.ResizeRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + def test_resize_rest_from_dict(): test_resize_rest(request_type=dict) -def test_resize_rest_flattened(): +def test_resize_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -2839,36 +3336,44 @@ def test_resize_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.resize( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", size=443, ) + mock_args.update(sample_request) + client.resize(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert str(443) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resize" + % client.transport._host, + args[1], + ) -def test_resize_rest_flattened_error(): +def test_resize_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -2891,9 +3396,18 @@ def test_set_instance_template_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_set_template_request_resource" + ] = compute.RegionInstanceGroupManagersSetTemplateRequest( + instance_template="instance_template_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -2903,7 +3417,6 @@ def test_set_instance_template_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -2921,14 +3434,13 @@ def test_set_instance_template_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_instance_template(request) @@ -2939,7 +3451,6 @@ def test_set_instance_template_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -2957,19 +3468,49 @@ def test_set_instance_template_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_instance_template_rest_bad_request( + transport: str = "rest", + request_type=compute.SetInstanceTemplateRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_set_template_request_resource" + ] = compute.RegionInstanceGroupManagersSetTemplateRequest( + instance_template="instance_template_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_instance_template(request) + + def test_set_instance_template_rest_from_dict(): test_set_instance_template_rest(request_type=dict) -def test_set_instance_template_rest_flattened(): +def test_set_instance_template_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -2978,43 +3519,46 @@ def test_set_instance_template_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_managers_set_template_request_resource = compute.RegionInstanceGroupManagersSetTemplateRequest( - instance_template="instance_template_value" - ) - client.set_instance_template( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_managers_set_template_request_resource=region_instance_group_managers_set_template_request_resource, + region_instance_group_managers_set_template_request_resource=compute.RegionInstanceGroupManagersSetTemplateRequest( + instance_template="instance_template_value" + ), ) + mock_args.update(sample_request) + client.set_instance_template(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagersSetTemplateRequest.to_json( - region_instance_group_managers_set_template_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setInstanceTemplate" + % client.transport._host, + args[1], + ) -def test_set_instance_template_rest_flattened_error(): +def test_set_instance_template_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3039,9 +3583,18 @@ def test_set_target_pools_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ] = compute.RegionInstanceGroupManagersSetTargetPoolsRequest( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -3051,7 +3604,6 @@ def test_set_target_pools_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3069,14 +3621,13 @@ def test_set_target_pools_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_target_pools(request) @@ -3087,7 +3638,6 @@ def test_set_target_pools_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3105,19 +3655,49 @@ def test_set_target_pools_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_target_pools_rest_bad_request( + transport: str = "rest", + request_type=compute.SetTargetPoolsRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_managers_set_target_pools_request_resource" + ] = compute.RegionInstanceGroupManagersSetTargetPoolsRequest( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_target_pools(request) + + def test_set_target_pools_rest_from_dict(): test_set_target_pools_rest(request_type=dict) -def test_set_target_pools_rest_flattened(): +def test_set_target_pools_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -3126,43 +3706,46 @@ def test_set_target_pools_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_managers_set_target_pools_request_resource = compute.RegionInstanceGroupManagersSetTargetPoolsRequest( - fingerprint="fingerprint_value" - ) - client.set_target_pools( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_managers_set_target_pools_request_resource=region_instance_group_managers_set_target_pools_request_resource, + region_instance_group_managers_set_target_pools_request_resource=compute.RegionInstanceGroupManagersSetTargetPoolsRequest( + fingerprint="fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_target_pools(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagersSetTargetPoolsRequest.to_json( - region_instance_group_managers_set_target_pools_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/setTargetPools" + % client.transport._host, + args[1], + ) -def test_set_target_pools_rest_flattened_error(): +def test_set_target_pools_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3187,9 +3770,20 @@ def test_update_per_instance_configs_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ] = compute.RegionInstanceGroupManagerUpdateInstanceConfigReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -3199,7 +3793,6 @@ def test_update_per_instance_configs_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -3217,14 +3810,13 @@ def test_update_per_instance_configs_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update_per_instance_configs(request) @@ -3235,7 +3827,6 @@ def test_update_per_instance_configs_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -3253,19 +3844,51 @@ def test_update_per_instance_configs_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_per_instance_configs_rest_bad_request( + transport: str = "rest", + request_type=compute.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest, +): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init[ + "region_instance_group_manager_update_instance_config_req_resource" + ] = compute.RegionInstanceGroupManagerUpdateInstanceConfigReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update_per_instance_configs(request) + + def test_update_per_instance_configs_rest_from_dict(): test_update_per_instance_configs_rest(request_type=dict) -def test_update_per_instance_configs_rest_flattened(): +def test_update_per_instance_configs_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -3274,45 +3897,48 @@ def test_update_per_instance_configs_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_group_manager_update_instance_config_req_resource = compute.RegionInstanceGroupManagerUpdateInstanceConfigReq( - per_instance_configs=[ - compute.PerInstanceConfig(fingerprint="fingerprint_value") - ] - ) - client.update_per_instance_configs( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group_manager="instance_group_manager_value", - region_instance_group_manager_update_instance_config_req_resource=region_instance_group_manager_update_instance_config_req_resource, + region_instance_group_manager_update_instance_config_req_resource=compute.RegionInstanceGroupManagerUpdateInstanceConfigReq( + per_instance_configs=[ + compute.PerInstanceConfig(fingerprint="fingerprint_value") + ] + ), ) + mock_args.update(sample_request) + client.update_per_instance_configs(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_manager_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupManagerUpdateInstanceConfigReq.to_json( - region_instance_group_manager_update_instance_config_req_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/updatePerInstanceConfigs" + % client.transport._host, + args[1], + ) -def test_update_per_instance_configs_rest_flattened_error(): +def test_update_per_instance_configs_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupManagersClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -3427,8 +4053,10 @@ def test_region_instance_group_managers_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_instance_group_managers_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -3452,29 +4080,6 @@ def test_region_instance_group_managers_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_instance_group_managers_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_instance_group_managers.transports.RegionInstanceGroupManagersTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionInstanceGroupManagersTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_instance_group_managers_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -3486,7 +4091,6 @@ def test_region_instance_group_managers_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_instance_group_managers_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -3502,21 +4106,6 @@ def test_region_instance_group_managers_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_instance_group_managers_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionInstanceGroupManagersClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_instance_group_managers_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -3665,3 +4254,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionInstanceGroupManagersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_instance_groups.py b/tests/unit/gapic/compute_v1/test_region_instance_groups.py index b388ac57c..f2b7bea95 100644 --- a/tests/unit/gapic/compute_v1/test_region_instance_groups.py +++ b/tests/unit/gapic/compute_v1/test_region_instance_groups.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_instance_groups import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_instance_groups import pagers from google.cloud.compute_v1.services.region_instance_groups import transports -from google.cloud.compute_v1.services.region_instance_groups.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_region_instance_groups_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_region_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_region_instance_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_region_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_region_instance_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_region_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_region_instance_groups_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_region_instance_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_region_instance_groups_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": 
expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_region_instance_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_region_instance_groups_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_region_instance_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_region_instance_groups_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_region_instance_groups_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_region_instance_groups_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_region_instance_groups_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_region_instance_groups_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_region_instance_groups_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -454,9 +447,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -468,7 +465,6 @@ def test_get_rest( id=205, kind="kind_value", name="name_value", - named_ports=[compute.NamedPort(name="name_value")], network="network_value", region="region_value", self_link="self_link_value", @@ -478,9 +474,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroup.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -493,7 +489,6 @@ def test_get_rest( assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.named_ports == [compute.NamedPort(name="name_value")] assert response.network == "network_value" assert response.region == "region_value" assert response.self_link == "self_link_value" @@ -502,13 +497,40 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionInstanceGroupRequest +): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -517,34 +539,43 @@ def test_get_rest_flattened(): return_value = compute.InstanceGroup() # Wrap the value into a proper Response obj - json_return_value = compute.InstanceGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.InstanceGroup.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group="instance_group_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -565,28 +596,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.RegionInstanceGroupList( id="id_value", - items=[ - compute.InstanceGroup(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RegionInstanceGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionInstanceGroupList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -594,22 +621,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.InstanceGroup(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionInstanceGroupsRequest +): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -618,31 +664,35 @@ def test_list_rest_flattened(): return_value = compute.RegionInstanceGroupList() # Wrap the value into a proper Response obj - json_return_value = compute.RegionInstanceGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionInstanceGroupList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroups" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -655,13 +705,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionInstanceGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionInstanceGroupList( @@ -691,16 +743,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceGroup) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -713,28 +764,35 @@ def test_list_instances_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + request_init[ + "region_instance_groups_list_instances_request_resource" + ] = compute.RegionInstanceGroupsListInstancesRequest( + instance_state=compute.RegionInstanceGroupsListInstancesRequest.InstanceState.ALL + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.RegionInstanceGroupsListInstances( id="id_value", - items=[compute.InstanceWithNamedPorts(instance="instance_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupsListInstances.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_instances(request) @@ -742,20 +800,51 @@ def test_list_instances_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListInstancesPager) assert response.id == "id_value" - assert response.items == [compute.InstanceWithNamedPorts(instance="instance_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_instances_rest_bad_request( + transport: str = "rest", + request_type=compute.ListInstancesRegionInstanceGroupsRequest, +): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + request_init[ + "region_instance_groups_list_instances_request_resource" + ] = compute.RegionInstanceGroupsListInstancesRequest( + instance_state=compute.RegionInstanceGroupsListInstancesRequest.InstanceState.ALL + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_instances(request) def test_list_instances_rest_from_dict(): test_list_instances_rest(request_type=dict) -def test_list_instances_rest_flattened(): +def test_list_instances_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -764,45 +853,48 @@ def test_list_instances_rest_flattened(): return_value = compute.RegionInstanceGroupsListInstances() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.RegionInstanceGroupsListInstances.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_groups_list_instances_request_resource = compute.RegionInstanceGroupsListInstancesRequest( - instance_state=compute.RegionInstanceGroupsListInstancesRequest.InstanceState.ALL - ) - client.list_instances( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group="instance_group_value", - region_instance_groups_list_instances_request_resource=region_instance_groups_list_instances_request_resource, + region_instance_groups_list_instances_request_resource=compute.RegionInstanceGroupsListInstancesRequest( + instance_state=compute.RegionInstanceGroupsListInstancesRequest.InstanceState.ALL + ), ) + mock_args.update(sample_request) + client.list_instances(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupsListInstancesRequest.to_json( - region_instance_groups_list_instances_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_list_instances_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/listInstances" + % client.transport._host, + args[1], + ) + + +def test_list_instances_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -819,13 +911,15 @@ def test_list_instances_rest_flattened_error(): ) -def test_list_instances_pager(): +def test_list_instances_rest_pager(): client = RegionInstanceGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionInstanceGroupsListInstances( @@ -860,16 +954,24 @@ def test_list_instances_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_instances(request={}) + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + sample_request[ + "region_instance_groups_list_instances_request_resource" + ] = compute.RegionInstanceGroupsListInstancesRequest( + instance_state=compute.RegionInstanceGroupsListInstancesRequest.InstanceState.ALL + ) - assert pager._metadata == metadata + pager = client.list_instances(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.InstanceWithNamedPorts) for i in results) - pages = list(client.list_instances(request={}).pages) + pages = list(client.list_instances(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -882,9 +984,18 @@ def test_set_named_ports_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + request_init[ + "region_instance_groups_set_named_ports_request_resource" + ] = compute.RegionInstanceGroupsSetNamedPortsRequest( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -894,7 +1005,6 @@ def test_set_named_ports_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -912,14 +1022,13 @@ def test_set_named_ports_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_named_ports(request) @@ -930,7 +1039,6 @@ def test_set_named_ports_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -948,19 +1056,49 @@ def test_set_named_ports_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_named_ports_rest_bad_request( + transport: str = "rest", + request_type=compute.SetNamedPortsRegionInstanceGroupRequest, +): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + request_init[ + "region_instance_groups_set_named_ports_request_resource" + ] = compute.RegionInstanceGroupsSetNamedPortsRequest( + fingerprint="fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_named_ports(request) + + def test_set_named_ports_rest_from_dict(): test_set_named_ports_rest(request_type=dict) -def test_set_named_ports_rest_flattened(): +def test_set_named_ports_rest_flattened(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -969,43 +1107,46 @@ def test_set_named_ports_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_instance_groups_set_named_ports_request_resource = compute.RegionInstanceGroupsSetNamedPortsRequest( - fingerprint="fingerprint_value" - ) - client.set_named_ports( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", instance_group="instance_group_value", - region_instance_groups_set_named_ports_request_resource=region_instance_groups_set_named_ports_request_resource, + region_instance_groups_set_named_ports_request_resource=compute.RegionInstanceGroupsSetNamedPortsRequest( + fingerprint="fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_named_ports(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "instance_group_value" in http_call[1] + str(body) + str(params) - assert compute.RegionInstanceGroupsSetNamedPortsRequest.to_json( - region_instance_groups_set_named_ports_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_named_ports_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instanceGroups/{instance_group}/setNamedPorts" + % client.transport._host, + args[1], + ) + + +def test_set_named_ports_rest_flattened_error(transport: str = "rest"): client = RegionInstanceGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1103,8 +1244,10 @@ def test_region_instance_groups_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_instance_groups_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1128,29 +1271,6 @@ def test_region_instance_groups_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_instance_groups_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - 
"google.cloud.compute_v1.services.region_instance_groups.transports.RegionInstanceGroupsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionInstanceGroupsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_instance_groups_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1162,7 +1282,6 @@ def test_region_instance_groups_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_instance_groups_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1178,21 +1297,6 @@ def test_region_instance_groups_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_instance_groups_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionInstanceGroupsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_instance_groups_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1339,3 +1443,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionInstanceGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_instances.py b/tests/unit/gapic/compute_v1/test_region_instances.py index 191b8ad93..587bb090c 100644 --- a/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/tests/unit/gapic/compute_v1/test_region_instances.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,32 +31,16 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_instances import RegionInstancesClient from google.cloud.compute_v1.services.region_instances import transports -from google.cloud.compute_v1.services.region_instances.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -194,7 +178,7 @@ def test_region_instances_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -203,6 +187,7 @@ def test_region_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -210,7 +195,7 @@ def test_region_instances_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +204,7 @@ def test_region_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +212,7 @@ def 
test_region_instances_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +221,7 @@ def test_region_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -254,7 +241,7 @@ def test_region_instances_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -263,6 +250,7 @@ def test_region_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -305,7 +293,7 @@ def test_region_instances_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -322,6 +310,7 @@ def test_region_instances_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -346,7 +335,7 @@ def test_region_instances_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +344,7 @@ def test_region_instances_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -367,7 +357,7 @@ def test_region_instances_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -376,6 +366,7 @@ def test_region_instances_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -390,7 +381,7 @@ def test_region_instances_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -399,6 +390,7 @@ def test_region_instances_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -413,7 +405,7 @@ def test_region_instances_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -422,6 +414,7 @@ def test_region_instances_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -432,9 +425,12 @@ def test_bulk_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init[ + "bulk_insert_instance_resource_resource" + ] = compute.BulkInsertInstanceResource(count=553) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -444,7 +440,6 @@ def test_bulk_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -462,14 +457,13 @@ def test_bulk_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.bulk_insert(request) @@ -480,7 +474,6 @@ def test_bulk_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -498,18 +491,43 @@ def test_bulk_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_bulk_insert_rest_bad_request( + transport: str = "rest", request_type=compute.BulkInsertRegionInstanceRequest +): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init[ + "bulk_insert_instance_resource_resource" + ] = compute.BulkInsertInstanceResource(count=553) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.bulk_insert(request) + + def test_bulk_insert_rest_from_dict(): test_bulk_insert_rest(request_type=dict) -def test_bulk_insert_rest_flattened(): - client = RegionInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_bulk_insert_rest_flattened(transport: str = "rest"): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -517,40 +535,42 @@ def test_bulk_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - bulk_insert_instance_resource_resource = compute.BulkInsertInstanceResource( - count=553 - ) - client.bulk_insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - bulk_insert_instance_resource_resource=bulk_insert_instance_resource_resource, + bulk_insert_instance_resource_resource=compute.BulkInsertInstanceResource( + count=553 + ), ) + mock_args.update(sample_request) + client.bulk_insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.BulkInsertInstanceResource.to_json( - bulk_insert_instance_resource_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/instances/bulkInsert" + % client.transport._host, + args[1], + ) -def test_bulk_insert_rest_flattened_error(): - client = RegionInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_bulk_insert_rest_flattened_error(transport: str = "rest"): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -639,8 +659,10 @@ def test_region_instances_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_instances_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -664,29 +686,6 @@ def test_region_instances_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_instances_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_instances.transports.RegionInstancesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionInstancesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_instances_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -698,7 +697,6 @@ def test_region_instances_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_instances_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -714,21 +712,6 @@ def test_region_instances_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_instances_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionInstancesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_instances_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -875,3 +858,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py b/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py index 89527269d..b4c69cb22 100644 --- a/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py +++ b/tests/unit/gapic/compute_v1/test_region_network_endpoint_groups.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_network_endpoint_groups import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_network_endpoint_groups import pagers from google.cloud.compute_v1.services.region_network_endpoint_groups import transports -from google.cloud.compute_v1.services.region_network_endpoint_groups.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +194,7 @@ def test_region_network_endpoint_groups_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +203,7 @@ def test_region_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +211,7 @@ def test_region_network_endpoint_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +220,7 @@ def test_region_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -242,7 +228,7 @@ def test_region_network_endpoint_groups_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +237,7 @@ def test_region_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -270,7 +257,7 @@ def test_region_network_endpoint_groups_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -279,6 +266,7 @@ def test_region_network_endpoint_groups_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -321,7 +309,7 @@ def test_region_network_endpoint_groups_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = 
client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -338,6 +326,7 @@ def test_region_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -362,7 +351,7 @@ def test_region_network_endpoint_groups_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -371,6 +360,7 @@ def test_region_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -383,7 +373,7 @@ def test_region_network_endpoint_groups_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -392,6 +382,7 @@ def test_region_network_endpoint_groups_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -412,7 +403,7 @@ def test_region_network_endpoint_groups_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,6 +412,7 @@ def test_region_network_endpoint_groups_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -441,7 +433,7 @@ def test_region_network_endpoint_groups_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -450,6 +442,7 @@ def test_region_network_endpoint_groups_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -461,9 +454,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -473,7 +470,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -491,14 +487,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -509,7 +504,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -527,19 +521,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteRegionNetworkEndpointGroupRequest, +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -548,34 +567,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", network_endpoint_group="network_endpoint_group_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -596,20 +624,18 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"}, - app_engine=compute.NetworkEndpointGroupAppEngine(service="service_value"), - cloud_function=compute.NetworkEndpointGroupCloudFunction( - function="function_value" - ), - cloud_run=compute.NetworkEndpointGroupCloudRun(service="service_value"), creation_timestamp="creation_timestamp_value", default_port=1289, description="description_value", @@ -626,25 +652,15 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.NetworkEndpointGroup) - assert response.annotations == {"key_value": "value_value"} - assert response.app_engine == compute.NetworkEndpointGroupAppEngine( - service="service_value" - ) - assert response.cloud_function == compute.NetworkEndpointGroupCloudFunction( - function="function_value" - ) - assert response.cloud_run == compute.NetworkEndpointGroupCloudRun( - service="service_value" - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.default_port == 1289 assert response.description == "description_value" @@ -663,13 +679,40 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionNetworkEndpointGroupRequest +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -678,34 +721,43 @@ def test_get_rest_flattened(): return_value = compute.NetworkEndpointGroup() # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroup.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroup.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "network_endpoint_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", network_endpoint_group="network_endpoint_group_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "network_endpoint_group_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups/{network_endpoint_group}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -727,9 +779,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["network_endpoint_group_resource"] = compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -739,7 +794,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -757,14 +811,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -775,7 +828,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -793,19 +845,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertRegionNetworkEndpointGroupRequest, +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", 
"region": "sample2"} + request_init["network_endpoint_group_resource"] = compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -814,41 +890,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - network_endpoint_group_resource = compute.NetworkEndpointGroup( - annotations={"key_value": "value_value"} - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - network_endpoint_group_resource=network_endpoint_group_resource, + network_endpoint_group_resource=compute.NetworkEndpointGroup( + annotations={"key_value": "value_value"} + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.NetworkEndpointGroup.to_json( - network_endpoint_group_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -871,28 +947,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NetworkEndpointGroupList( id="id_value", - items=[ - compute.NetworkEndpointGroup(annotations={"key_value": "value_value"}) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -900,22 +972,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.NetworkEndpointGroup(annotations={"key_value": "value_value"}) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionNetworkEndpointGroupsRequest +): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -924,31 +1015,35 @@ def test_list_rest_flattened(): return_value = compute.NetworkEndpointGroupList() # Wrap the value into a proper Response obj - json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NetworkEndpointGroupList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/networkEndpointGroups" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionNetworkEndpointGroupsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -961,13 +1056,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionNetworkEndpointGroupsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NetworkEndpointGroupList( @@ -997,16 +1094,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NetworkEndpointGroup) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1092,8 +1188,10 @@ def test_region_network_endpoint_groups_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_network_endpoint_groups_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1117,29 +1215,6 @@ def test_region_network_endpoint_groups_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_network_endpoint_groups_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_network_endpoint_groups.transports.RegionNetworkEndpointGroupsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = 
(ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionNetworkEndpointGroupsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_network_endpoint_groups_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1151,7 +1226,6 @@ def test_region_network_endpoint_groups_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_network_endpoint_groups_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1167,21 +1241,6 @@ def test_region_network_endpoint_groups_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_network_endpoint_groups_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionNetworkEndpointGroupsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_network_endpoint_groups_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1330,3 +1389,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionNetworkEndpointGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py b/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py index fe13f3563..4d9d20956 100644 --- a/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py +++ b/tests/unit/gapic/compute_v1/test_region_notification_endpoints.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_notification_endpoints import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_notification_endpoints import pagers from google.cloud.compute_v1.services.region_notification_endpoints import transports -from google.cloud.compute_v1.services.region_notification_endpoints.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -210,7 +194,7 @@ def test_region_notification_endpoints_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +203,7 @@ def test_region_notification_endpoints_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +211,7 @@ def test_region_notification_endpoints_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +220,7 @@ def test_region_notification_endpoints_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -242,7 +228,7 @@ def test_region_notification_endpoints_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +237,7 @@ def test_region_notification_endpoints_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -270,7 +257,7 @@ def test_region_notification_endpoints_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -279,6 +266,7 @@ def test_region_notification_endpoints_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -321,7 +309,7 @@ def test_region_notification_endpoints_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, 
client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -338,6 +326,7 @@ def test_region_notification_endpoints_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -362,7 +351,7 @@ def test_region_notification_endpoints_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -371,6 +360,7 @@ def test_region_notification_endpoints_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -383,7 +373,7 @@ def test_region_notification_endpoints_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -392,6 +382,7 @@ def test_region_notification_endpoints_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -412,7 +403,7 @@ def test_region_notification_endpoints_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -421,6 +412,7 @@ def test_region_notification_endpoints_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -441,7 +433,7 @@ def test_region_notification_endpoints_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -450,6 +442,7 @@ def test_region_notification_endpoints_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -461,9 +454,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "notification_endpoint": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -473,7 +470,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -491,14 +487,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -509,7 +504,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -527,19 +521,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", + request_type=compute.DeleteRegionNotificationEndpointRequest, +): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "notification_endpoint": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -548,34 +567,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "notification_endpoint": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", notification_endpoint="notification_endpoint_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "notification_endpoint_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -596,9 +624,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "notification_endpoint": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -606,9 +638,6 @@ def test_get_rest( return_value = compute.NotificationEndpoint( creation_timestamp="creation_timestamp_value", description="description_value", - grpc_settings=compute.NotificationEndpointGrpcSettings( - authority="authority_value" - ), id=205, kind="kind_value", name="name_value", @@ -617,9 +646,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.NotificationEndpoint.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NotificationEndpoint.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -628,9 +657,6 @@ def test_get_rest( assert isinstance(response, compute.NotificationEndpoint) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert response.grpc_settings == compute.NotificationEndpointGrpcSettings( - authority="authority_value" - ) assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" @@ -638,13 +664,40 @@ def test_get_rest( assert response.self_link == "self_link_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionNotificationEndpointRequest +): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "notification_endpoint": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -653,34 +706,43 @@ def test_get_rest_flattened(): return_value = compute.NotificationEndpoint() # Wrap the value into a proper Response obj - json_return_value = compute.NotificationEndpoint.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NotificationEndpoint.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "notification_endpoint": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", notification_endpoint="notification_endpoint_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "notification_endpoint_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints/{notification_endpoint}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -702,9 +764,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["notification_endpoint_resource"] = compute.NotificationEndpoint( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -714,7 +779,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -732,14 +796,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -750,7 +813,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -768,19 +830,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", + request_type=compute.InsertRegionNotificationEndpointRequest, +): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["notification_endpoint_resource"] = compute.NotificationEndpoint( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -789,41 +875,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- notification_endpoint_resource = compute.NotificationEndpoint( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - notification_endpoint_resource=notification_endpoint_resource, + notification_endpoint_resource=compute.NotificationEndpoint( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.NotificationEndpoint.to_json( - notification_endpoint_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -846,30 +932,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.NotificationEndpointList( id="id_value", - items=[ - compute.NotificationEndpoint( - creation_timestamp="creation_timestamp_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.NotificationEndpointList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NotificationEndpointList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -877,22 +957,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.NotificationEndpoint(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionNotificationEndpointsRequest +): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -901,31 +1000,35 @@ def test_list_rest_flattened(): return_value = compute.NotificationEndpointList() # Wrap the value into a proper Response obj - json_return_value = compute.NotificationEndpointList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.NotificationEndpointList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/notificationEndpoints" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionNotificationEndpointsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -938,13 +1041,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionNotificationEndpointsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.NotificationEndpointList( @@ -974,16 +1079,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.NotificationEndpoint) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1069,8 +1173,10 @@ def test_region_notification_endpoints_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_notification_endpoints_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1094,29 +1200,6 @@ def test_region_notification_endpoints_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_notification_endpoints_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_notification_endpoints.transports.RegionNotificationEndpointsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionNotificationEndpointsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_notification_endpoints_base_transport_with_adc(): # Test the default credentials 
are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1128,7 +1211,6 @@ def test_region_notification_endpoints_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_notification_endpoints_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1144,21 +1226,6 @@ def test_region_notification_endpoints_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_notification_endpoints_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionNotificationEndpointsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_notification_endpoints_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1307,3 +1374,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionNotificationEndpointsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_operations.py b/tests/unit/gapic/compute_v1/test_region_operations.py index ab978f284..3ea361ad2 100644 --- a/tests/unit/gapic/compute_v1/test_region_operations.py +++ b/tests/unit/gapic/compute_v1/test_region_operations.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_operations import RegionOperationsClient from google.cloud.compute_v1.services.region_operations import pagers from google.cloud.compute_v1.services.region_operations import transports -from google.cloud.compute_v1.services.region_operations.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_region_operations_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_region_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_region_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_region_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is 
not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_region_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_region_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_region_operations_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_region_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_region_operations_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_region_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_region_operations_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_region_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -369,7 +359,7 @@ def test_region_operations_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_region_operations_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_region_operations_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_region_operations_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_region_operations_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_region_operations_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "operation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -444,9 +437,9 @@ def test_delete_rest( return_value = compute.DeleteRegionOperationResponse() # Wrap the value into a proper Response obj - json_return_value = compute.DeleteRegionOperationResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DeleteRegionOperationResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -455,12 +448,37 @@ def test_delete_rest( assert isinstance(response, compute.DeleteRegionOperationResponse) +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionOperationRequest +): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "operation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -468,31 +486,42 @@ def test_delete_rest_flattened(): return_value = compute.DeleteRegionOperationResponse() # Wrap the value into a proper Response obj - json_return_value = compute.DeleteRegionOperationResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DeleteRegionOperationResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "operation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", operation="operation_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -512,9 +541,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "operation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -524,7 +553,6 @@ def test_get_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -542,14 +570,13 @@ def test_get_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -560,7 +587,6 @@ def test_get_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -578,18 +604,40 @@ def test_get_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionOperationRequest +): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "operation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -597,31 +645,42 @@ def test_get_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "operation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", operation="operation_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -641,26 +700,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.OperationList( id="id_value", - items=[compute.Operation(client_operation_id="client_operation_id_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -668,21 +725,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Operation(client_operation_id="client_operation_id_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionOperationsRequest +): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -690,30 +768,36 @@ def test_list_rest_flattened(): return_value = compute.OperationList() # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/operations" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -725,11 +809,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.OperationList( @@ -751,16 +837,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Operation) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -772,9 +857,9 @@ def test_wait_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "operation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -784,7 +869,6 @@ def test_wait_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -802,14 +886,13 @@ def test_wait_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.wait(request) @@ -820,7 +903,6 @@ def test_wait_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -838,18 +920,40 @@ def test_wait_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_wait_rest_bad_request( + transport: str = "rest", request_type=compute.WaitRegionOperationRequest +): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a 
request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "operation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait(request) + + def test_wait_rest_from_dict(): test_wait_rest(request_type=dict) -def test_wait_rest_flattened(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_wait_rest_flattened(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -857,31 +961,42 @@ def test_wait_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.wait( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "operation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", operation="operation_value", ) + mock_args.update(sample_request) + client.wait(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/operations/{operation}/wait" + % client.transport._host, + args[1], + ) -def test_wait_rest_flattened_error(): - client = RegionOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_wait_rest_flattened_error(transport: str = "rest"): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -973,8 +1088,10 @@ def test_region_operations_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_operations_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -998,29 +1115,6 @@ def test_region_operations_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_operations_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_operations.transports.RegionOperationsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionOperationsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_operations_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1032,7 +1126,6 @@ def test_region_operations_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_operations_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1048,21 +1141,6 @@ def test_region_operations_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_operations_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionOperationsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_operations_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1209,3 +1287,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py b/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py index f107ffb9b..40ee79aa0 100644 --- a/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py +++ b/tests/unit/gapic/compute_v1/test_region_ssl_certificates.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_ssl_certificates import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_ssl_certificates import pagers from google.cloud.compute_v1.services.region_ssl_certificates import transports -from google.cloud.compute_v1.services.region_ssl_certificates.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_region_ssl_certificates_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_region_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_region_ssl_certificates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_region_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_region_ssl_certificates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_region_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_region_ssl_certificates_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_region_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_region_ssl_certificates_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == 
"false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_region_ssl_certificates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_region_ssl_certificates_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_region_ssl_certificates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_region_ssl_certificates_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_region_ssl_certificates_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_region_ssl_certificates_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_region_ssl_certificates_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_region_ssl_certificates_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_region_ssl_certificates_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -454,9 +447,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "ssl_certificate": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -466,7 +463,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -484,14 +480,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -502,7 +497,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -520,19 +514,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionSslCertificateRequest +): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "ssl_certificate": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -541,34 +559,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "ssl_certificate": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", ssl_certificate="ssl_certificate_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "ssl_certificate_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -589,9 +616,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "ssl_certificate": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -603,24 +634,18 @@ def test_get_rest( expire_time="expire_time_value", id=205, kind="kind_value", - managed=compute.SslCertificateManagedSslCertificate( - domain_status={"key_value": "value_value"} - ), name="name_value", private_key="private_key_value", region="region_value", self_link="self_link_value", - self_managed=compute.SslCertificateSelfManagedSslCertificate( - certificate="certificate_value" - ), subject_alternative_names=["subject_alternative_names_value"], type_=compute.SslCertificate.Type.MANAGED, ) # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificate.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -633,27 +658,48 @@ def test_get_rest( assert response.expire_time == "expire_time_value" assert response.id == 205 assert response.kind == "kind_value" - assert response.managed == compute.SslCertificateManagedSslCertificate( - domain_status={"key_value": "value_value"} - ) assert response.name == "name_value" assert response.private_key == "private_key_value" assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.self_managed == compute.SslCertificateSelfManagedSslCertificate( - certificate="certificate_value" - ) assert response.subject_alternative_names == ["subject_alternative_names_value"] assert response.type_ == compute.SslCertificate.Type.MANAGED +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionSslCertificateRequest +): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "ssl_certificate": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -662,34 +708,43 @@ def test_get_rest_flattened(): return_value = compute.SslCertificate() # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificate.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "ssl_certificate": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", ssl_certificate="ssl_certificate_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "ssl_certificate_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/sslCertificates/{ssl_certificate}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -710,9 +765,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["ssl_certificate_resource"] = compute.SslCertificate( + certificate="certificate_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
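[Editor's note] The request_init pattern used throughout these rewritten tests works because proto-plus request messages accept a plain mapping in their constructor, and the REST transport can only build a URL once the path fields are populated ("a request that will satisfy transcoding"); body fields such as ssl_certificate_resource can be passed as message instances in the same dict. A small hedged sketch of that construction, mirroring the request_init usage above:

# Standalone sketch; assumes google-cloud-compute is installed.
from google.cloud.compute_v1.types import compute

request = compute.InsertRegionSslCertificateRequest(
    {
        "project": "sample1",
        "region": "sample2",
        "ssl_certificate_resource": compute.SslCertificate(
            certificate="certificate_value"
        ),
    }
)
assert request.project == "sample1"
assert request.ssl_certificate_resource.certificate == "certificate_value"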
with mock.patch.object(Session, "request") as req: @@ -722,7 +780,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -740,14 +797,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -758,7 +814,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -776,19 +831,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionSslCertificateRequest +): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["ssl_certificate_resource"] = compute.SslCertificate( + certificate="certificate_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -797,41 +875,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- ssl_certificate_resource = compute.SslCertificate( - certificate="certificate_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - ssl_certificate_resource=ssl_certificate_resource, + ssl_certificate_resource=compute.SslCertificate( + certificate="certificate_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.SslCertificate.to_json( - ssl_certificate_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/sslCertificates" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -854,26 +932,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.SslCertificateList( id="id_value", - items=[compute.SslCertificate(certificate="certificate_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificateList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -881,20 +957,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.SslCertificate(certificate="certificate_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionSslCertificatesRequest +): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -903,31 +1000,35 @@ def test_list_rest_flattened(): return_value = compute.SslCertificateList() # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificateList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/sslCertificates" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionSslCertificatesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -940,13 +1041,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionSslCertificatesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SslCertificateList( @@ -976,16 +1079,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.SslCertificate) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1071,8 +1173,10 @@ def test_region_ssl_certificates_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_ssl_certificates_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1096,29 +1200,6 @@ def test_region_ssl_certificates_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_ssl_certificates_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_ssl_certificates.transports.RegionSslCertificatesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionSslCertificatesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_ssl_certificates_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
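[Editor's note] The renamed test_list_rest_pager above now drives the pager with a real sample_request instead of an empty dict and walks pages via next_page_token. A simplified, hedged model of the page-following loop the ListPager performs (toy code, not the library implementation):

# Self-contained toy model of REST list pagination.
from typing import Callable, Iterator, List, NamedTuple

class Page(NamedTuple):
    items: List[str]
    next_page_token: str

def iterate_all(fetch: Callable[[str], Page], page_token: str = "") -> Iterator[str]:
    """Yield every item, following next_page_token until it comes back empty."""
    while True:
        page = fetch(page_token)
        yield from page.items
        if not page.next_page_token:
            return
        page_token = page.next_page_token

# Toy usage: three fake pages chained by tokens.
fake_pages = {
    "": Page(["a", "b"], "abc"),
    "abc": Page(["c"], "def"),
    "def": Page(["d"], ""),
}
assert list(iterate_all(lambda token: fake_pages[token])) == ["a", "b", "c", "d"]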
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1130,7 +1211,6 @@ def test_region_ssl_certificates_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_ssl_certificates_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1146,21 +1226,6 @@ def test_region_ssl_certificates_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_ssl_certificates_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionSslCertificatesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_ssl_certificates_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1307,3 +1372,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionSslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py b/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py index 2fc88ebc5..015e0bfb9 100644 --- a/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py +++ b/tests/unit/gapic/compute_v1/test_region_target_http_proxies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_target_http_proxies import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_target_http_proxies import pagers from google.cloud.compute_v1.services.region_target_http_proxies import transports -from google.cloud.compute_v1.services.region_target_http_proxies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -204,7 +188,7 @@ def test_region_target_http_proxies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -213,6 +197,7 @@ def test_region_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -220,7 +205,7 @@ def test_region_target_http_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -229,6 +214,7 @@ def test_region_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -236,7 +222,7 @@ def test_region_target_http_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -245,6 +231,7 @@ def test_region_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -264,7 +251,7 @@ def test_region_target_http_proxies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -273,6 +260,7 @@ def test_region_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -315,7 +303,7 @@ def test_region_target_http_proxies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if 
use_client_cert_env == "false": expected_client_cert_source = None @@ -332,6 +320,7 @@ def test_region_target_http_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -356,7 +345,7 @@ def test_region_target_http_proxies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -365,6 +354,7 @@ def test_region_target_http_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -377,7 +367,7 @@ def test_region_target_http_proxies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -386,6 +376,7 @@ def test_region_target_http_proxies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,7 +397,7 @@ def test_region_target_http_proxies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -415,6 +406,7 @@ def test_region_target_http_proxies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -435,7 +427,7 @@ def test_region_target_http_proxies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -444,6 +436,7 @@ def test_region_target_http_proxies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -454,9 +447,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -466,7 +463,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -484,14 +480,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -502,7 +497,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -520,19 +514,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionTargetHttpProxyRequest +): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -541,34 +559,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_http_proxy="target_http_proxy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_http_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -589,9 +616,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -610,9 +641,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -631,13 +662,40 @@ def test_get_rest( assert response.url_map == "url_map_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionTargetHttpProxyRequest +): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -646,34 +704,43 @@ def test_get_rest_flattened(): return_value = compute.TargetHttpProxy() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_http_proxy="target_http_proxy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_http_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -694,9 +761,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_http_proxy_resource"] = compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -706,7 +776,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -724,14 +793,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -742,7 +810,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -760,19 +827,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionTargetHttpProxyRequest +): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_http_proxy_resource"] = compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -781,41 +871,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_http_proxy_resource = compute.TargetHttpProxy( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - target_http_proxy_resource=target_http_proxy_resource, + target_http_proxy_resource=compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpProxy.to_json( - target_http_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -838,28 +928,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetHttpProxyList( id="id_value", - items=[ - compute.TargetHttpProxy(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -867,22 +953,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetHttpProxy(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionTargetHttpProxiesRequest +): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -891,31 +996,35 @@ def test_list_rest_flattened(): return_value = compute.TargetHttpProxyList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -928,13 +1037,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionTargetHttpProxiesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetHttpProxyList( @@ -964,16 +1075,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetHttpProxy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -985,9 +1095,16 @@ def test_set_url_map_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -997,7 +1114,6 @@ def test_set_url_map_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1015,14 +1131,13 @@ def test_set_url_map_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_url_map(request) @@ -1033,7 +1148,6 @@ def test_set_url_map_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1051,19 +1165,46 @@ def test_set_url_map_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_url_map_rest_bad_request( + transport: str = "rest", request_type=compute.SetUrlMapRegionTargetHttpProxyRequest +): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + def test_set_url_map_rest_from_dict(): test_set_url_map_rest(request_type=dict) -def test_set_url_map_rest_flattened(): +def test_set_url_map_rest_flattened(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1072,41 +1213,44 @@ def test_set_url_map_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_map_reference_resource = compute.UrlMapReference(url_map="url_map_value") - client.set_url_map( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_http_proxy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_http_proxy="target_http_proxy_value", - url_map_reference_resource=url_map_reference_resource, + url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"), ) + mock_args.update(sample_request) + client.set_url_map(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_http_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMapReference.to_json( - url_map_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_url_map_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpProxies/{target_http_proxy}/setUrlMap" + % client.transport._host, + args[1], + ) + + +def test_set_url_map_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1203,8 +1347,10 @@ def test_region_target_http_proxies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_target_http_proxies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1228,29 +1374,6 @@ def test_region_target_http_proxies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_target_http_proxies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_target_http_proxies.transports.RegionTargetHttpProxiesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionTargetHttpProxiesTransport( - 
credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_target_http_proxies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1262,7 +1385,6 @@ def test_region_target_http_proxies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_target_http_proxies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1278,21 +1400,6 @@ def test_region_target_http_proxies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_target_http_proxies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionTargetHttpProxiesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_target_http_proxies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1439,3 +1546,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionTargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py b/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py index 580ad0a38..2cb70294e 100644 --- a/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py +++ b/tests/unit/gapic/compute_v1/test_region_target_https_proxies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_target_https_proxies import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.region_target_https_proxies import pagers from google.cloud.compute_v1.services.region_target_https_proxies import transports -from google.cloud.compute_v1.services.region_target_https_proxies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -208,7 +192,7 @@ def test_region_target_https_proxies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -217,6 +201,7 @@ def test_region_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -224,7 +209,7 @@ def test_region_target_https_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -233,6 +218,7 @@ def test_region_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -240,7 +226,7 @@ def test_region_target_https_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -249,6 +235,7 @@ def test_region_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -268,7 +255,7 @@ def test_region_target_https_proxies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -277,6 +264,7 @@ def test_region_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -319,7 +307,7 @@ def test_region_target_https_proxies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, 
client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -336,6 +324,7 @@ def test_region_target_https_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -360,7 +349,7 @@ def test_region_target_https_proxies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -369,6 +358,7 @@ def test_region_target_https_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -381,7 +371,7 @@ def test_region_target_https_proxies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -390,6 +380,7 @@ def test_region_target_https_proxies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -410,7 +401,7 @@ def test_region_target_https_proxies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -419,6 +410,7 @@ def test_region_target_https_proxies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -439,7 +431,7 @@ def test_region_target_https_proxies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -448,6 +440,7 @@ def test_region_target_https_proxies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -458,9 +451,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -470,7 +467,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -488,14 +484,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -506,7 +501,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -524,19 +518,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionTargetHttpsProxyRequest +): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -545,34 +563,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_https_proxy="target_https_proxy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -593,9 +620,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -619,9 +650,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -645,13 +676,40 @@ def test_get_rest( assert response.url_map == "url_map_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionTargetHttpsProxyRequest +): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -660,34 +718,43 @@ def test_get_rest_flattened(): return_value = compute.TargetHttpsProxy() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_https_proxy="target_https_proxy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -708,9 +775,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_https_proxy_resource"] = compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -720,7 +790,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -738,14 +807,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -756,7 +824,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -774,19 +841,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionTargetHttpsProxyRequest +): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_https_proxy_resource"] = compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -795,41 +885,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_https_proxy_resource = compute.TargetHttpsProxy( - authorization_policy="authorization_policy_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - target_https_proxy_resource=target_https_proxy_resource, + target_https_proxy_resource=compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpsProxy.to_json( - target_https_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -852,30 +942,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetHttpsProxyList( id="id_value", - items=[ - compute.TargetHttpsProxy( - authorization_policy="authorization_policy_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -883,22 +967,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetHttpsProxy(authorization_policy="authorization_policy_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionTargetHttpsProxiesRequest +): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -907,31 +1010,35 @@ def test_list_rest_flattened(): return_value = compute.TargetHttpsProxyList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -944,13 +1051,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionTargetHttpsProxiesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetHttpsProxyList( @@ -980,16 +1089,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetHttpsProxy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1002,9 +1110,18 @@ def test_set_ssl_certificates_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ] = compute.RegionTargetHttpsProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1014,7 +1131,6 @@ def test_set_ssl_certificates_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1032,14 +1148,13 @@ def test_set_ssl_certificates_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_ssl_certificates(request) @@ -1050,7 +1165,6 @@ def test_set_ssl_certificates_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1068,19 +1182,49 @@ def test_set_ssl_certificates_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_ssl_certificates_rest_bad_request( + transport: str = "rest", + request_type=compute.SetSslCertificatesRegionTargetHttpsProxyRequest, +): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request_init[ + "region_target_https_proxies_set_ssl_certificates_request_resource" + ] = compute.RegionTargetHttpsProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates(request) + + def test_set_ssl_certificates_rest_from_dict(): test_set_ssl_certificates_rest(request_type=dict) -def test_set_ssl_certificates_rest_flattened(): +def test_set_ssl_certificates_rest_flattened(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1089,43 +1233,46 @@ def test_set_ssl_certificates_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_target_https_proxies_set_ssl_certificates_request_resource = compute.RegionTargetHttpsProxiesSetSslCertificatesRequest( - ssl_certificates=["ssl_certificates_value"] - ) - client.set_ssl_certificates( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_https_proxy="target_https_proxy_value", - region_target_https_proxies_set_ssl_certificates_request_resource=region_target_https_proxies_set_ssl_certificates_request_resource, + region_target_https_proxies_set_ssl_certificates_request_resource=compute.RegionTargetHttpsProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ), ) + mock_args.update(sample_request) + client.set_ssl_certificates(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.RegionTargetHttpsProxiesSetSslCertificatesRequest.to_json( - region_target_https_proxies_set_ssl_certificates_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_ssl_certificates_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setSslCertificates" + % client.transport._host, + args[1], + ) + + +def test_set_ssl_certificates_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1149,9 +1296,16 @@ def test_set_url_map_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1161,7 +1315,6 @@ def test_set_url_map_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1179,14 +1332,13 @@ def test_set_url_map_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_url_map(request) @@ -1197,7 +1349,6 @@ def test_set_url_map_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1215,19 +1366,46 @@ def test_set_url_map_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_url_map_rest_bad_request( + transport: str = "rest", request_type=compute.SetUrlMapRegionTargetHttpsProxyRequest +): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + def test_set_url_map_rest_from_dict(): test_set_url_map_rest(request_type=dict) -def test_set_url_map_rest_flattened(): +def test_set_url_map_rest_flattened(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1236,41 +1414,44 @@ def test_set_url_map_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_map_reference_resource = compute.UrlMapReference(url_map="url_map_value") - client.set_url_map( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_https_proxy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_https_proxy="target_https_proxy_value", - url_map_reference_resource=url_map_reference_resource, + url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"), ) + mock_args.update(sample_request) + client.set_url_map(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMapReference.to_json( - url_map_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_url_map_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetHttpsProxies/{target_https_proxy}/setUrlMap" + % client.transport._host, + args[1], + ) + + +def test_set_url_map_rest_flattened_error(transport: str = "rest"): client = RegionTargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1368,8 +1549,10 @@ def test_region_target_https_proxies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_target_https_proxies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1393,29 +1576,6 @@ def test_region_target_https_proxies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_target_https_proxies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_target_https_proxies.transports.RegionTargetHttpsProxiesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionTargetHttpsProxiesTransport( - 
credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_target_https_proxies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1427,7 +1587,6 @@ def test_region_target_https_proxies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_target_https_proxies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1443,21 +1602,6 @@ def test_region_target_https_proxies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_target_https_proxies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionTargetHttpsProxiesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_target_https_proxies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1604,3 +1748,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionTargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_region_url_maps.py b/tests/unit/gapic/compute_v1/test_region_url_maps.py index 0c2fa29a7..63c988058 100644 --- a/tests/unit/gapic/compute_v1/test_region_url_maps.py +++ b/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.region_url_maps import RegionUrlMapsClient from google.cloud.compute_v1.services.region_url_maps import pagers from google.cloud.compute_v1.services.region_url_maps import transports -from google.cloud.compute_v1.services.region_url_maps.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -194,7 +178,7 @@ def test_region_url_maps_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -203,6 +187,7 @@ def test_region_url_maps_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -210,7 +195,7 @@ def test_region_url_maps_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -219,6 +204,7 @@ def test_region_url_maps_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and 
GOOGLE_API_USE_MTLS_ENDPOINT is @@ -226,7 +212,7 @@ def test_region_url_maps_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,6 +221,7 @@ def test_region_url_maps_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -254,7 +241,7 @@ def test_region_url_maps_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -263,6 +250,7 @@ def test_region_url_maps_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -295,7 +283,7 @@ def test_region_url_maps_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -312,6 +300,7 @@ def test_region_url_maps_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -336,7 +325,7 @@ def test_region_url_maps_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -345,6 +334,7 @@ def test_region_url_maps_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
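Every patched.assert_called_once_with(...) in these client-option tests now also expects always_use_jwt_access=True: however the client is configured, that flag must reach the transport constructor. A reduced sketch of the same assertion, checking only that flag; the test name is illustrative, and it assumes the client's get_transport_class helper to resolve the REST transport:

import mock

from google.cloud.compute_v1.services.region_url_maps import RegionUrlMapsClient


def test_rest_transport_receives_always_use_jwt_access():
    transport_class = RegionUrlMapsClient.get_transport_class("rest")
    # Stub the transport constructor so no credential lookup or session is created,
    # then inspect the keyword arguments the client hands down to it.
    with mock.patch.object(transport_class, "__init__", return_value=None) as patched:
        RegionUrlMapsClient(transport="rest")
        _, kwargs = patched.call_args
        assert kwargs["always_use_jwt_access"] is True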
@@ -357,7 +347,7 @@ def test_region_url_maps_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -366,6 +356,7 @@ def test_region_url_maps_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -380,7 +371,7 @@ def test_region_url_maps_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -389,6 +380,7 @@ def test_region_url_maps_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -403,7 +395,7 @@ def test_region_url_maps_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -412,6 +404,7 @@ def test_region_url_maps_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -422,9 +415,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -434,7 +427,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -452,14 +444,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -470,7 +461,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -488,18 +478,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -507,31 +519,42 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "url_map": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", url_map="url_map_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -549,49 +572,29 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionUrlMapR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.UrlMap( creation_timestamp="creation_timestamp_value", - default_route_action=compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ), default_service="default_service_value", - default_url_redirect=compute.HttpRedirectAction( - host_redirect="host_redirect_value" - ), description="description_value", fingerprint="fingerprint_value", - header_action=compute.HttpHeaderAction( - request_headers_to_add=[ - compute.HttpHeaderOption(header_name="header_name_value") - ] - ), - host_rules=[compute.HostRule(description="description_value")], id=205, kind="kind_value", name="name_value", - path_matchers=[ - compute.PathMatcher( - default_route_action=compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ) - ) - ], region="region_value", self_link="self_link_value", - tests=[compute.UrlMapTest(description="description_value")], ) # Wrap the value into a proper Response obj - json_return_value = compute.UrlMap.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMap.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -599,42 +602,47 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionUrlMapR # Establish that the response is the type that we expect. 
assert isinstance(response, compute.UrlMap) assert response.creation_timestamp == "creation_timestamp_value" - assert response.default_route_action == compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ) assert response.default_service == "default_service_value" - assert response.default_url_redirect == compute.HttpRedirectAction( - host_redirect="host_redirect_value" - ) assert response.description == "description_value" assert response.fingerprint == "fingerprint_value" - assert response.header_action == compute.HttpHeaderAction( - request_headers_to_add=[ - compute.HttpHeaderOption(header_name="header_name_value") - ] - ) - assert response.host_rules == [compute.HostRule(description="description_value")] assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.path_matchers == [ - compute.PathMatcher( - default_route_action=compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ) - ) - ] assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.tests == [compute.UrlMapTest(description="description_value")] + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -642,31 +650,42 @@ def test_get_rest_flattened(): return_value = compute.UrlMap() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMap.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMap.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "url_map": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", url_map="url_map_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -686,9 +705,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -698,7 +720,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -716,14 +737,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -734,7 +754,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -752,18 +771,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["url_map_resource"] = 
compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -771,38 +815,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_map_resource = compute.UrlMap(creation_timestamp="creation_timestamp_value") - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - url_map_resource=url_map_resource, + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMap.to_json( - url_map_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/urlMaps" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -824,26 +872,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.UrlMapList( id="id_value", - items=[compute.UrlMap(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -851,21 +897,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.UrlMap(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionUrlMapsRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -873,30 +940,36 @@ def test_list_rest_flattened(): return_value = compute.UrlMapList() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/urlMaps" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -908,11 +981,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.UrlMapList( @@ -934,16 +1009,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.UrlMap) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -955,9 +1029,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -967,7 +1044,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -985,14 +1061,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1003,7 +1078,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1021,18 +1095,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1040,40 +1139,47 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- url_map_resource = compute.UrlMap(creation_timestamp="creation_timestamp_value") - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "url_map": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", url_map="url_map_value", - url_map_resource=url_map_resource, + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMap.to_json( - url_map_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1096,9 +1202,12 @@ def test_update_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1108,7 +1217,6 @@ def test_update_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1126,14 +1234,13 @@ def test_update_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1144,7 +1251,6 @@ def test_update_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1162,18 +1268,43 @@ def test_update_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1181,40 +1312,47 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- url_map_resource = compute.UrlMap(creation_timestamp="creation_timestamp_value") - client.update( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "url_map": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", url_map="url_map_value", - url_map_resource=url_map_resource, + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMap.to_json( - url_map_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1237,38 +1375,68 @@ def test_validate_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request_init[ + "region_url_maps_validate_request_resource" + ] = compute.RegionUrlMapsValidateRequest( + resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.UrlMapsValidateResponse( - result=compute.UrlMapValidationResult(load_errors=["load_errors_value"]), - ) + return_value = compute.UrlMapsValidateResponse() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.validate(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.UrlMapsValidateResponse) - assert response.result == compute.UrlMapValidationResult( - load_errors=["load_errors_value"] + + +def test_validate_rest_bad_request( + transport: str = "rest", request_type=compute.ValidateRegionUrlMapRequest +): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "url_map": "sample3"} + request_init[ + "region_url_maps_validate_request_resource" + ] = compute.RegionUrlMapsValidateRequest( + resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.validate(request) def test_validate_rest_from_dict(): test_validate_rest(request_type=dict) -def test_validate_rest_flattened(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_validate_rest_flattened(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1276,42 +1444,47 @@ def test_validate_rest_flattened(): return_value = compute.UrlMapsValidateResponse() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_url_maps_validate_request_resource = compute.RegionUrlMapsValidateRequest( - resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") - ) - client.validate( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "url_map": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", url_map="url_map_value", - region_url_maps_validate_request_resource=region_url_maps_validate_request_resource, + region_url_maps_validate_request_resource=compute.RegionUrlMapsValidateRequest( + resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") + ), ) + mock_args.update(sample_request) + client.validate(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) - assert compute.RegionUrlMapsValidateRequest.to_json( - region_url_maps_validate_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_validate_rest_flattened_error(): - client = RegionUrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/urlMaps/{url_map}/validate" + % client.transport._host, + args[1], + ) + + +def test_validate_rest_flattened_error(transport: str = "rest"): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1409,8 +1582,10 @@ def test_region_url_maps_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_region_url_maps_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1434,29 +1609,6 @@ def test_region_url_maps_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_region_url_maps_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.region_url_maps.transports.RegionUrlMapsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionUrlMapsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_region_url_maps_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1468,7 +1620,6 @@ def test_region_url_maps_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_region_url_maps_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1484,21 +1635,6 @@ def test_region_url_maps_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_region_url_maps_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionUrlMapsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_region_url_maps_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1645,3 +1781,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionUrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_regions.py b/tests/unit/gapic/compute_v1/test_regions.py index a4bbce669..af9bdab57 100644 --- a/tests/unit/gapic/compute_v1/test_regions.py +++ b/tests/unit/gapic/compute_v1/test_regions.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.regions import RegionsClient from google.cloud.compute_v1.services.regions import pagers from google.cloud.compute_v1.services.regions import transports -from google.cloud.compute_v1.services.regions.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -182,7 +166,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -191,6 +175,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -198,7 +183,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -207,6 +192,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,7 +200,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -223,6 +209,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -242,7 +229,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +238,7 @@ def test_regions_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -281,7 +269,7 @@ def test_regions_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as 
patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -298,6 +286,7 @@ def test_regions_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -322,7 +311,7 @@ def test_regions_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -331,6 +320,7 @@ def test_regions_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -343,7 +333,7 @@ def test_regions_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -352,6 +342,7 @@ def test_regions_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -366,7 +357,7 @@ def test_regions_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -375,6 +366,7 @@ def test_regions_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -389,7 +381,7 @@ def test_regions_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -398,6 +390,7 @@ def test_regions_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -406,21 +399,19 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionRequest credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.Region( creation_timestamp="creation_timestamp_value", - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", id=205, kind="kind_value", name="name_value", - quotas=[compute.Quota(limit=0.543)], self_link="self_link_value", status=compute.Region.Status.DOWN, supports_pzs=True, @@ -428,9 +419,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionRequest ) # Wrap the value into a proper Response obj - json_return_value = compute.Region.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Region.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -438,24 +429,47 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRegionRequest # Establish that the response is the type that we expect. assert isinstance(response, compute.Region) assert response.creation_timestamp == "creation_timestamp_value" - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.quotas == [compute.Quota(limit=0.543)] assert response.self_link == "self_link_value" assert response.status == compute.Region.Status.DOWN assert response.supports_pzs is True assert response.zones == ["zones_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRegionRequest +): + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RegionsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -463,30 +477,36 @@ def test_get_rest_flattened(): return_value = compute.Region() # Wrap the value into a proper Response obj - json_return_value = compute.Region.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Region.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
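The mocked REST responses above are built by serializing a proto-plus message with to_json and stuffing the bytes into a requests.Response. A small sketch of that round trip, assuming proto-plus's from_json helper for the client-side parse; field values are illustrative.

from requests import Response

from google.cloud.compute_v1.types import compute

# Serialize a message the same way the mocked tests do.
return_value = compute.Region(name="name_value", id=205)
response_value = Response()
response_value.status_code = 200
json_return_value = compute.Region.to_json(return_value)
response_value._content = json_return_value.encode("UTF-8")

# The client performs the inverse: JSON payload back into the message type.
parsed = compute.Region.from_json(response_value.content.decode("utf-8"))
assert parsed.name == "name_value"
assert parsed.id == 205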
- client.get( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RegionsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -501,26 +521,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListRegionsRequ credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.RegionList( id="id_value", - items=[compute.Region(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RegionList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -528,21 +546,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListRegionsRequ # Establish that the response is the type that we expect. 
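The rewritten flattened-call assertions validate the final request URL against the method's HTTP rule with google.api_core.path_template.validate, instead of substring checks on the body and params. A short sketch of how that helper behaves; the host name here is illustrative.

from google.api_core import path_template

# Matching URL: every {placeholder} segment is satisfied by one path segment.
assert path_template.validate(
    "https://compute.googleapis.com/compute/v1/projects/{project}/regions/{region}",
    "https://compute.googleapis.com/compute/v1/projects/project_value/regions/region_value",
)

# Non-matching URL: the literal "regions" segment is absent, so validation fails.
assert not path_template.validate(
    "https://compute.googleapis.com/compute/v1/projects/{project}/regions/{region}",
    "https://compute.googleapis.com/compute/v1/projects/project_value/zones/zone_value",
)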
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Region(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRegionsRequest +): + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RegionsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -550,27 +589,35 @@ def test_list_rest_flattened(): return_value = compute.RegionList() # Wrap the value into a proper Response obj - json_return_value = compute.RegionList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RegionList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions" % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RegionsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -580,11 +627,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RegionsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. 
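The new *_bad_request tests only need a 400-status requests.Response on the mocked session because google-api-core maps HTTP status codes onto typed exceptions. A sketch of that mapping via the from_http_response helper; the payload is illustrative.

from google.api_core import exceptions as core_exceptions
from requests import Request, Response

response = Response()
response.status_code = 400
response.request = Request()
response._content = b'{"error": {"message": "bad request"}}'

# A 400 response becomes the BadRequest exception the tests above expect
# the client to raise.
exc = core_exceptions.from_http_response(response)
assert isinstance(exc, core_exceptions.BadRequest)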
with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RegionList( @@ -606,16 +655,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Region) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -697,8 +745,10 @@ def test_regions_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_regions_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -723,30 +773,6 @@ def test_regions_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_regions_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.regions.transports.RegionsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RegionsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_regions_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -758,7 +784,6 @@ def test_regions_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_regions_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -775,22 +800,6 @@ def test_regions_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_regions_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
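The rewritten pager tests drive pagination by feeding Session.request a sequence of canned responses through side_effect. A stripped-down sketch of that mock behaviour, with illustrative page payloads.

import mock
from requests import Response
from requests.sessions import Session

# One canned Response per page, in the order the pager will request them.
return_values = []
for body in (b'{"nextPageToken": "abc"}', b'{"nextPageToken": ""}'):
    page = Response()
    page.status_code = 200
    page._content = body
    return_values.append(page)

with mock.patch.object(Session, "request") as req:
    req.side_effect = return_values
    # Each call consumes the next response, which is how the pager above walks
    # the page tokens "abc", "def", "ghi" and finally "".
    first = Session().request("GET", "https://example.com")
    second = Session().request("GET", "https://example.com")
    assert first is return_values[0]
    assert second is return_values[1]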
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RegionsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_regions_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -937,3 +946,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_reservations.py b/tests/unit/gapic/compute_v1/test_reservations.py index 7996bee17..ba6684540 100644 --- a/tests/unit/gapic/compute_v1/test_reservations.py +++ b/tests/unit/gapic/compute_v1/test_reservations.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.reservations import ReservationsClient from google.cloud.compute_v1.services.reservations import pagers from google.cloud.compute_v1.services.reservations import transports -from google.cloud.compute_v1.services.reservations.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_reservations_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_reservations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_reservations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_reservations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_reservations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_reservations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_reservations_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_reservations_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_reservations_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def 
test_reservations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_reservations_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_reservations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_reservations_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_reservations_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_reservations_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_reservations_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_reservations_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_reservations_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,31 +408,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.ReservationAggregatedList( id="id_value", - items={ - "key_value": compute.ReservationsScopedList( - reservations=[compute.Reservation(commitment="commitment_value")] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ReservationAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ReservationAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -447,24 +434,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.ReservationsScopedList( - reservations=[compute.Reservation(commitment="commitment_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListReservationsRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -472,27 +478,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.ReservationAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.ReservationAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ReservationAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/reservations" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -502,11 +517,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ReservationAggregatedList( @@ -539,10 +556,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.ReservationsScopedList) assert pager.get("h") is None @@ -560,7 +576,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.ReservationsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -572,9 +588,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -584,7 +600,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -602,14 +617,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -620,7 +634,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -638,18 +651,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteReservationRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -657,31 +692,42 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "reservation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", reservation="reservation_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "reservation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -699,9 +745,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetReservationRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -715,16 +761,15 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetReservationRe name="name_value", satisfies_pzs=True, self_link="self_link_value", - specific_reservation=compute.AllocationSpecificSKUReservation(count=553), specific_reservation_required=True, status=compute.Reservation.Status.CREATING, zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Reservation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Reservation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -739,20 +784,42 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetReservationRe assert response.name == "name_value" assert response.satisfies_pzs is True assert response.self_link == "self_link_value" - assert response.specific_reservation == compute.AllocationSpecificSKUReservation( - count=553 - ) assert response.specific_reservation_required is True assert response.status == compute.Reservation.Status.CREATING assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetReservationRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -760,31 +827,42 @@ def test_get_rest_flattened(): return_value = compute.Reservation() # Wrap the value into a proper Response obj - json_return_value = compute.Reservation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Reservation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "reservation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", reservation="reservation_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "reservation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -804,60 +882,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyReservationRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -865,31 +944,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -909,9 +999,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["reservation_resource"] = compute.Reservation( + commitment="commitment_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -921,7 +1014,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -939,14 +1031,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -957,7 +1048,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -975,18 +1065,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertReservationRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["reservation_resource"] = compute.Reservation( + commitment="commitment_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -994,38 +1109,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- reservation_resource = compute.Reservation(commitment="commitment_value") - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - reservation_resource=reservation_resource, + reservation_resource=compute.Reservation(commitment="commitment_value"), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.Reservation.to_json( - reservation_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1045,26 +1162,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ReservationList( id="id_value", - items=[compute.Reservation(commitment="commitment_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ReservationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ReservationList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1072,19 +1187,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.Reservation(commitment="commitment_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListReservationsRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1092,30 +1230,36 @@ def test_list_rest_flattened(): return_value = compute.ReservationList() # Wrap the value into a proper Response obj - json_return_value = compute.ReservationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ReservationList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1127,11 +1271,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ReservationList( @@ -1161,16 +1307,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Reservation) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1182,9 +1327,12 @@ def test_resize_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} + request_init[ + "reservations_resize_request_resource" + ] = compute.ReservationsResizeRequest(specific_sku_count=1920) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1194,7 +1342,6 @@ def test_resize_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1212,14 +1359,13 @@ def test_resize_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.resize(request) @@ -1230,7 +1376,6 @@ def test_resize_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1248,18 +1393,43 @@ def test_resize_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_resize_rest_bad_request( + transport: str = "rest", request_type=compute.ResizeReservationRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "reservation": "sample3"} + request_init[ + "reservations_resize_request_resource" + ] = compute.ReservationsResizeRequest(specific_sku_count=1920) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.resize(request) + + def test_resize_rest_from_dict(): test_resize_rest(request_type=dict) -def test_resize_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_resize_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1267,42 +1437,47 @@ def test_resize_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - reservations_resize_request_resource = compute.ReservationsResizeRequest( - specific_sku_count=1920 - ) - client.resize( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "reservation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", reservation="reservation_value", - reservations_resize_request_resource=reservations_resize_request_resource, + reservations_resize_request_resource=compute.ReservationsResizeRequest( + specific_sku_count=1920 + ), ) + mock_args.update(sample_request) + client.resize(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "reservation_value" in http_call[1] + str(body) + str(params) - assert compute.ReservationsResizeRequest.to_json( - reservations_resize_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_resize_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations/{reservation}/resize" + % client.transport._host, + args[1], + ) + + +def test_resize_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1325,60 +1500,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyReservationRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1386,42 +1568,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- zone_set_policy_request_resource = compute.ZoneSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - zone_set_policy_request_resource=zone_set_policy_request_resource, + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.ZoneSetPolicyRequest.to_json( - zone_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1444,9 +1631,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1456,9 +1646,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1468,12 +1658,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsReservationRequest +): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1481,42 +1699,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = ReservationsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/reservations/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1616,8 +1839,10 @@ def test_reservations_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_reservations_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1641,29 +1866,6 @@ def test_reservations_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_reservations_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.reservations.transports.ReservationsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ReservationsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_reservations_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1675,7 +1877,6 @@ def test_reservations_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_reservations_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1691,21 +1892,6 @@ def test_reservations_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_reservations_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ReservationsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_reservations_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1852,3 +2038,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ReservationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_resource_policies.py b/tests/unit/gapic/compute_v1/test_resource_policies.py index 8831229a6..72df7f76c 100644 --- a/tests/unit/gapic/compute_v1/test_resource_policies.py +++ b/tests/unit/gapic/compute_v1/test_resource_policies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.resource_policies import ResourcePoliciesClient from google.cloud.compute_v1.services.resource_policies import pagers from google.cloud.compute_v1.services.resource_policies import transports -from google.cloud.compute_v1.services.resource_policies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_resource_policies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_resource_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_resource_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_resource_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_resource_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_resource_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_resource_policies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_resource_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_resource_policies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 
@@ def test_resource_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_resource_policies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_resource_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -369,7 +359,7 @@ def test_resource_policies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_resource_policies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_resource_policies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_resource_policies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_resource_policies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_resource_policies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,9 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -444,26 +437,16 @@ def test_aggregated_list_rest( return_value = compute.ResourcePolicyAggregatedList( etag="etag_value", id="id_value", - items={ - "key_value": compute.ResourcePoliciesScopedList( - resource_policies=[ - compute.ResourcePolicy( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ResourcePolicyAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ResourcePolicyAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -472,26 +455,43 @@ def test_aggregated_list_rest( assert isinstance(response, pagers.AggregatedListPager) assert response.etag == "etag_value" assert response.id == "id_value" - assert response.items == { - "key_value": compute.ResourcePoliciesScopedList( - resource_policies=[ - compute.ResourcePolicy(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListResourcePoliciesRequest +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -499,27 +499,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.ResourcePolicyAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.ResourcePolicyAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ResourcePolicyAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/resourcePolicies" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -529,11 +538,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ResourcePolicyAggregatedList( @@ -569,10 +580,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.ResourcePoliciesScopedList) assert pager.get("h") is None @@ -590,7 +600,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.ResourcePoliciesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -602,9 +612,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "resource_policy": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -614,7 +628,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -632,14 +645,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -650,7 +662,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -668,18 +679,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteResourcePolicyRequest +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "resource_policy": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -687,33 +724,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource_policy="resource_policy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -733,9 +781,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "resource_policy": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -743,34 +795,18 @@ def test_get_rest( return_value = compute.ResourcePolicy( creation_timestamp="creation_timestamp_value", description="description_value", - group_placement_policy=compute.ResourcePolicyGroupPlacementPolicy( - availability_domain_count=2650 - ), id=205, - instance_schedule_policy=compute.ResourcePolicyInstanceSchedulePolicy( - expiration_time="expiration_time_value" - ), kind="kind_value", name="name_value", region="region_value", - resource_status=compute.ResourcePolicyResourceStatus( - instance_schedule_policy=compute.ResourcePolicyResourceStatusInstanceSchedulePolicyStatus( - last_run_start_time="last_run_start_time_value" - ) - ), self_link="self_link_value", - snapshot_schedule_policy=compute.ResourcePolicySnapshotSchedulePolicy( - retention_policy=compute.ResourcePolicySnapshotSchedulePolicyRetentionPolicy( - max_retention_days=1933 - ) - ), status=compute.ResourcePolicy.Status.CREATING, ) # Wrap the value into a proper Response obj - json_return_value = compute.ResourcePolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ResourcePolicy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -779,43 +815,49 @@ def test_get_rest( assert isinstance(response, compute.ResourcePolicy) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" - assert ( - response.group_placement_policy - == compute.ResourcePolicyGroupPlacementPolicy(availability_domain_count=2650) - ) assert response.id == 205 - assert ( - response.instance_schedule_policy - == compute.ResourcePolicyInstanceSchedulePolicy( - expiration_time="expiration_time_value" - ) - ) assert response.kind == "kind_value" assert response.name == "name_value" assert response.region == "region_value" - assert response.resource_status == compute.ResourcePolicyResourceStatus( - instance_schedule_policy=compute.ResourcePolicyResourceStatusInstanceSchedulePolicyStatus( - last_run_start_time="last_run_start_time_value" - ) - ) assert response.self_link == "self_link_value" - assert ( - response.snapshot_schedule_policy - == compute.ResourcePolicySnapshotSchedulePolicy( - retention_policy=compute.ResourcePolicySnapshotSchedulePolicyRetentionPolicy( - max_retention_days=1933 - ) - ) - ) assert response.status == compute.ResourcePolicy.Status.CREATING +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetResourcePolicyRequest +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "resource_policy": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -823,33 +865,44 @@ def test_get_rest_flattened(): return_value = compute.ResourcePolicy() # Wrap the value into a proper Response obj - json_return_value = compute.ResourcePolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ResourcePolicy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource_policy="resource_policy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource_policy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -869,60 +922,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyResourcePolicyRequest +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -930,31 +984,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -974,9 +1039,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["resource_policy_resource"] = compute.ResourcePolicy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -986,7 +1054,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1004,14 +1071,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1022,7 +1088,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1040,18 +1105,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertResourcePolicyRequest +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["resource_policy_resource"] = compute.ResourcePolicy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1059,40 +1149,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- resource_policy_resource = compute.ResourcePolicy( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - resource_policy_resource=resource_policy_resource, + resource_policy_resource=compute.ResourcePolicy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.ResourcePolicy.to_json( - resource_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1114,9 +1206,9 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1124,19 +1216,15 @@ def test_list_rest( return_value = compute.ResourcePolicyList( etag="etag_value", id="id_value", - items=[ - compute.ResourcePolicy(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ResourcePolicyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ResourcePolicyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1145,21 +1233,42 @@ def test_list_rest( assert isinstance(response, pagers.ListPager) assert response.etag == "etag_value" assert response.id == "id_value" - assert response.items == [ - compute.ResourcePolicy(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListResourcePoliciesRequest +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1167,30 +1276,36 @@ def test_list_rest_flattened(): return_value = compute.ResourcePolicyList() # Wrap the value into a proper Response obj - json_return_value = compute.ResourcePolicyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ResourcePolicyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1202,11 +1317,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ResourcePolicyList( @@ -1236,16 +1353,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ResourcePolicy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1257,60 +1373,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyResourcePolicyRequest +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1318,42 +1441,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- region_set_policy_request_resource = compute.RegionSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_policy_request_resource=region_set_policy_request_resource, + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetPolicyRequest.to_json( - region_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1377,9 +1505,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1389,9 +1520,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1401,12 +1532,41 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsResourcePolicyRequest, +): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1414,42 +1574,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = ResourcePoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/resourcePolicies/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1548,8 +1713,10 @@ def test_resource_policies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_resource_policies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1573,29 +1740,6 @@ def test_resource_policies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_resource_policies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.resource_policies.transports.ResourcePoliciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ResourcePoliciesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_resource_policies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1607,7 +1751,6 @@ def test_resource_policies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_resource_policies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1623,21 +1766,6 @@ def test_resource_policies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_resource_policies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ResourcePoliciesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_resource_policies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1784,3 +1912,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ResourcePoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_routers.py b/tests/unit/gapic/compute_v1/test_routers.py index 92e1c8043..372feddc2 100644 --- a/tests/unit/gapic/compute_v1/test_routers.py +++ b/tests/unit/gapic/compute_v1/test_routers.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.routers import RoutersClient from google.cloud.compute_v1.services.routers import pagers from google.cloud.compute_v1.services.routers import transports -from google.cloud.compute_v1.services.routers.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -182,7 +166,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -191,6 +175,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -198,7 +183,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -207,6 +192,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,7 +200,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -223,6 +209,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -242,7 +229,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -251,6 +238,7 @@ def test_routers_client_client_options(client_class, transport_class, transport_ client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -281,7 +269,7 @@ def test_routers_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as 
patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -298,6 +286,7 @@ def test_routers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -322,7 +311,7 @@ def test_routers_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -331,6 +320,7 @@ def test_routers_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -343,7 +333,7 @@ def test_routers_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -352,6 +342,7 @@ def test_routers_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -366,7 +357,7 @@ def test_routers_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -375,6 +366,7 @@ def test_routers_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -389,7 +381,7 @@ def test_routers_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -398,6 +390,7 @@ def test_routers_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -408,37 +401,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.RouterAggregatedList( id="id_value", - items={ - "key_value": compute.RoutersScopedList( - routers=[ - compute.Router( - bgp=compute.RouterBgp( - advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM - ) - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RouterAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouterAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -446,30 +427,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.RoutersScopedList( - routers=[ - compute.Router( - bgp=compute.RouterBgp( - advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM - ) - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListRoutersRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -477,27 +471,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.RouterAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.RouterAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouterAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/routers" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -507,11 +510,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RouterAggregatedList( @@ -544,10 +549,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.RoutersScopedList) assert pager.get("h") is None @@ -562,7 +566,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.RoutersScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -572,9 +576,9 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouterR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -584,7 +588,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouterR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -602,14 +605,13 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouterR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -620,7 +622,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouterR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -638,18 +639,40 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouterR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -657,31 +680,42 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", router="router_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "router_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -699,70 +733,77 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRouterRequest credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.Router( - bgp=compute.RouterBgp( - advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM - ), - bgp_peers=[ - compute.RouterBgpPeer( - advertise_mode=compute.RouterBgpPeer.AdvertiseMode.CUSTOM - ) - ], creation_timestamp="creation_timestamp_value", description="description_value", encrypted_interconnect_router=True, id=205, - interfaces=[compute.RouterInterface(ip_range="ip_range_value")], kind="kind_value", name="name_value", - nats=[compute.RouterNat(drain_nat_ips=["drain_nat_ips_value"])], network="network_value", region="region_value", self_link="self_link_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Router.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Router.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Router) - assert response.bgp == compute.RouterBgp( - advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM - ) - assert response.bgp_peers == [ - compute.RouterBgpPeer(advertise_mode=compute.RouterBgpPeer.AdvertiseMode.CUSTOM) - ] assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.encrypted_interconnect_router is True assert response.id == 205 - assert response.interfaces == [compute.RouterInterface(ip_range="ip_range_value")] assert response.kind == "kind_value" assert response.name == "name_value" - assert response.nats == [compute.RouterNat(drain_nat_ips=["drain_nat_ips_value"])] assert response.network == "network_value" assert response.region == "region_value" assert response.self_link == "self_link_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -770,31 +811,42 @@ def test_get_rest_flattened(): return_value = compute.Router() # Wrap the value into a proper Response obj - json_return_value = compute.Router.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Router.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", router="router_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "router_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -814,9 +866,9 @@ def test_get_nat_mapping_info_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -825,15 +877,13 @@ def test_get_nat_mapping_info_rest( id="id_value", kind="kind_value", next_page_token="next_page_token_value", - result=[compute.VmEndpointNatMappings(instance_name="instance_name_value")], self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.VmEndpointNatMappingsList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VmEndpointNatMappingsList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_nat_mapping_info(request) @@ -843,19 +893,40 @@ def test_get_nat_mapping_info_rest( assert response.id == "id_value" assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" - assert response.result == [ - compute.VmEndpointNatMappings(instance_name="instance_name_value") - ] assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_get_nat_mapping_info_rest_bad_request( + transport: str = "rest", request_type=compute.GetNatMappingInfoRoutersRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_nat_mapping_info(request) def test_get_nat_mapping_info_rest_from_dict(): test_get_nat_mapping_info_rest(request_type=dict) -def test_get_nat_mapping_info_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_nat_mapping_info_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -863,31 +934,42 @@ def test_get_nat_mapping_info_rest_flattened(): return_value = compute.VmEndpointNatMappingsList() # Wrap the value into a proper Response obj - json_return_value = compute.VmEndpointNatMappingsList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VmEndpointNatMappingsList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_nat_mapping_info( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", router="router_value", ) + mock_args.update(sample_request) + client.get_nat_mapping_info(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "router_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers/{router}/getNatMappingInfo" + % client.transport._host, + args[1], + ) -def test_get_nat_mapping_info_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_nat_mapping_info_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -900,11 +982,13 @@ def test_get_nat_mapping_info_rest_flattened_error(): ) -def test_get_nat_mapping_info_pager(): +def test_get_nat_mapping_info_rest_pager(): client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.VmEndpointNatMappingsList( @@ -937,16 +1021,19 @@ def test_get_nat_mapping_info_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.get_nat_mapping_info(request={}) + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } - assert pager._metadata == metadata + pager = client.get_nat_mapping_info(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.VmEndpointNatMappings) for i in results) - pages = list(client.get_nat_mapping_info(request={}).pages) + pages = list(client.get_nat_mapping_info(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -958,26 +1045,19 @@ def test_get_router_status_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.RouterStatusResponse( - kind="kind_value", - result=compute.RouterStatus( - best_routes=[ - compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])]) - ] - ), - ) + return_value = compute.RouterStatusResponse(kind="kind_value",) # Wrap the value into a proper Response obj - json_return_value = compute.RouterStatusResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouterStatusResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_router_status(request) @@ -985,17 +1065,39 @@ def test_get_router_status_rest( # Establish that the response is the type that we expect. assert isinstance(response, compute.RouterStatusResponse) assert response.kind == "kind_value" - assert response.result == compute.RouterStatus( - best_routes=[compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])])] + + +def test_get_router_status_rest_bad_request( + transport: str = "rest", request_type=compute.GetRouterStatusRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_router_status(request) + def test_get_router_status_rest_from_dict(): test_get_router_status_rest(request_type=dict) -def test_get_router_status_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_router_status_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1003,31 +1105,42 @@ def test_get_router_status_rest_flattened(): return_value = compute.RouterStatusResponse() # Wrap the value into a proper Response obj - json_return_value = compute.RouterStatusResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouterStatusResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_router_status( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", router="router_value", ) + mock_args.update(sample_request) + client.get_router_status(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "router_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers/{router}/getRouterStatus" + % client.transport._host, + args[1], + ) -def test_get_router_status_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_router_status_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1045,9 +1158,12 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouterR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["router_resource"] = compute.Router( + bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1057,7 +1173,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouterR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1075,14 +1190,13 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouterR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1093,7 +1207,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouterR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1111,18 +1224,43 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouterR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def 
test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["router_resource"] = compute.Router( + bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1130,40 +1268,44 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - router_resource = compute.Router( - bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - router_resource=router_resource, + router_resource=compute.Router( + bgp=compute.RouterBgp( + advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM + ) + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Router.to_json( - router_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1185,32 +1327,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListRoutersRequ credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.RouterList( id="id_value", - items=[ - compute.Router( - bgp=compute.RouterBgp( - advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM - ) - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RouterList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouterList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1218,23 +1352,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListRoutersRequ # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Router( - bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRoutersRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1242,30 +1395,36 @@ def test_list_rest_flattened(): return_value = compute.RouterList() # Wrap the value into a proper Response obj - json_return_value = compute.RouterList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouterList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1277,11 +1436,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RouterList( @@ -1303,16 +1464,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Router) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1322,9 +1482,12 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchRouterReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request_init["router_resource"] = compute.Router( + bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1334,7 +1497,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchRouterReq creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1352,14 +1514,13 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchRouterReq target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1370,7 +1531,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchRouterReq assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1388,18 +1548,43 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchRouterReq assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request_init["router_resource"] = compute.Router( + bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1407,42 +1592,49 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - router_resource = compute.Router( - bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", router="router_value", - router_resource=router_resource, + router_resource=compute.Router( + bgp=compute.RouterBgp( + advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM + ) + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "router_value" in http_call[1] + str(body) + str(params) - assert compute.Router.to_json( - router_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1467,42 +1659,64 @@ def test_preview_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request_init["router_resource"] = compute.Router( + bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.RoutersPreviewResponse( - resource=compute.Router( - bgp=compute.RouterBgp( - advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM - ) - ), - ) + return_value = compute.RoutersPreviewResponse() # Wrap the value into a proper Response obj - json_return_value = compute.RoutersPreviewResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RoutersPreviewResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.preview(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.RoutersPreviewResponse) - assert response.resource == compute.Router( + + +def test_preview_rest_bad_request( + transport: str = "rest", request_type=compute.PreviewRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request_init["router_resource"] = compute.Router( bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.preview(request) def test_preview_rest_from_dict(): test_preview_rest(request_type=dict) -def test_preview_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_preview_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1510,42 +1724,49 @@ def test_preview_rest_flattened(): return_value = compute.RoutersPreviewResponse() # Wrap the value into a proper Response obj - json_return_value = compute.RoutersPreviewResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RoutersPreviewResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- router_resource = compute.Router( - bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) - ) - client.preview( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", router="router_value", - router_resource=router_resource, + router_resource=compute.Router( + bgp=compute.RouterBgp( + advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM + ) + ), ) + mock_args.update(sample_request) + client.preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "router_value" in http_call[1] + str(body) + str(params) - assert compute.Router.to_json( - router_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_preview_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers/{router}/preview" + % client.transport._host, + args[1], + ) + + +def test_preview_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1568,9 +1789,12 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateRouterR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request_init["router_resource"] = compute.Router( + bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1580,7 +1804,6 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateRouterR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1598,14 +1821,13 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateRouterR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1616,7 +1838,6 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateRouterR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1634,18 +1855,43 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateRouterR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateRouterRequest +): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "router": "sample3"} + request_init["router_resource"] = compute.Router( + bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1653,42 +1899,49 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - router_resource = compute.Router( - bgp=compute.RouterBgp(advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM) - ) - client.update( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "router": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", router="router_value", - router_resource=router_resource, + router_resource=compute.Router( + bgp=compute.RouterBgp( + advertise_mode=compute.RouterBgp.AdvertiseMode.CUSTOM + ) + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "router_value" in http_call[1] + str(body) + str(params) - assert compute.Router.to_json( - router_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = RoutersClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/routers/{router}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1791,8 +2044,10 @@ def test_routers_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_routers_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1816,29 +2071,6 @@ def test_routers_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_routers_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.routers.transports.RoutersTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RoutersTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_routers_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1850,7 +2082,6 @@ def test_routers_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_routers_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1866,21 +2097,6 @@ def test_routers_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_routers_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RoutersClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_routers_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2027,3 +2243,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RoutersClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_routes.py b/tests/unit/gapic/compute_v1/test_routes.py index bc31a26d7..b864d0b55 100644 --- a/tests/unit/gapic/compute_v1/test_routes.py +++ b/tests/unit/gapic/compute_v1/test_routes.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,31 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.routes import RoutesClient from google.cloud.compute_v1.services.routes import pagers from google.cloud.compute_v1.services.routes import transports -from google.cloud.compute_v1.services.routes.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -180,7 +166,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -189,6 +175,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -196,7 +183,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +192,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the 
case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +200,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +209,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -240,7 +229,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -249,6 +238,7 @@ def test_routes_client_client_options(client_class, transport_class, transport_n client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -279,7 +269,7 @@ def test_routes_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -296,6 +286,7 @@ def test_routes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -320,7 +311,7 @@ def test_routes_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -329,6 +320,7 @@ def test_routes_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -341,7 +333,7 @@ def test_routes_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -350,6 +342,7 @@ def test_routes_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -364,7 +357,7 @@ def test_routes_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -373,6 +366,7 @@ def test_routes_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -387,7 +381,7 @@ def test_routes_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -396,6 +390,7 @@ def test_routes_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -404,9 +399,9 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouteRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "route": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -416,7 +411,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouteRe creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -434,14 +428,13 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouteRe target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -452,7 +445,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouteRe assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -470,18 +462,40 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteRouteRe assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteRouteRequest +): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "route": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -489,30 +503,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( - project="project_value", route="route_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "route": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", route="route_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "route_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/routes/{route}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -527,15 +547,14 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRouteRequest) credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "route": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Route( - as_paths=[compute.RouteAsPath(as_lists=[866])], creation_timestamp="creation_timestamp_value", description="description_value", dest_range="dest_range_value", @@ -554,20 +573,18 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRouteRequest) route_type=compute.Route.RouteType.BGP, self_link="self_link_value", tags=["tags_value"], - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], ) # Wrap the value into a proper Response obj - json_return_value = compute.Route.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Route.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Route) - assert response.as_paths == [compute.RouteAsPath(as_lists=[866])] assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.dest_range == "dest_range_value" @@ -586,17 +603,39 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetRouteRequest) assert response.route_type == compute.Route.RouteType.BGP assert response.self_link == "self_link_value" assert response.tags == ["tags_value"] - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetRouteRequest +): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "route": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -604,30 +643,36 @@ def test_get_rest_flattened(): return_value = compute.Route() # Wrap the value into a proper Response obj - json_return_value = compute.Route.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Route.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", route="route_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "route": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", route="route_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "route_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/routes/{route}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -642,9 +687,12 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouteRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["route_resource"] = compute.Route( + as_paths=[compute.RouteAsPath(as_lists=[866])] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -654,7 +702,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouteRe creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -672,14 +719,13 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouteRe target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -690,7 +736,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouteRe assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -708,18 +753,43 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertRouteRe assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertRouteRequest +): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), 
transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["route_resource"] = compute.Route( + as_paths=[compute.RouteAsPath(as_lists=[866])] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -727,35 +797,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - route_resource = compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])]) - client.insert( - project="project_value", route_resource=route_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + route_resource=compute.Route( + as_paths=[compute.RouteAsPath(as_lists=[866])] + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.Route.to_json( - route_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/routes" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -774,26 +850,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListRoutesReque credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.RouteList( id="id_value", - items=[compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])])], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.RouteList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouteList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -801,21 +875,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListRoutesReque # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Route(as_paths=[compute.RouteAsPath(as_lists=[866])]) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListRoutesRequest +): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -823,27 +918,36 @@ def test_list_rest_flattened(): return_value = compute.RouteList() # Wrap the value into a proper Response obj - json_return_value = compute.RouteList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.RouteList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/routes" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -853,11 +957,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = RoutesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.RouteList( @@ -879,16 +985,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Route) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -972,8 +1077,10 @@ def test_routes_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_routes_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -997,29 +1104,6 @@ def test_routes_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_routes_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.routes.transports.RoutesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.RoutesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def 
test_routes_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1031,7 +1115,6 @@ def test_routes_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_routes_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1047,21 +1130,6 @@ def test_routes_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_routes_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - RoutesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_routes_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1208,3 +1276,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RoutesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_security_policies.py b/tests/unit/gapic/compute_v1/test_security_policies.py index d923c3993..4c5f7adc4 100644 --- a/tests/unit/gapic/compute_v1/test_security_policies.py +++ b/tests/unit/gapic/compute_v1/test_security_policies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.security_policies import SecurityPoliciesClient from google.cloud.compute_v1.services.security_policies import pagers from google.cloud.compute_v1.services.security_policies import transports -from google.cloud.compute_v1.services.security_policies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_security_policies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_security_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_security_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_security_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is 
not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_security_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_security_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_security_policies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_security_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_security_policies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_security_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_security_policies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_security_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
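# A minimal, self-contained sketch of the assertion pattern the updated
# client_options tests in this hunk rely on: the transport constructor is
# mocked out, the client is created with an explicit transport name, and the
# test inspects the keyword arguments the client forwards -- including the
# newly added ``always_use_jwt_access=True``. This assumes the REST transport
# class is exported as ``transports.SecurityPoliciesRestTransport``; the
# helper name below is illustrative only, not part of the generated suite.
import mock

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.security_policies import (
    SecurityPoliciesClient,
    transports,
)


def _sketch_client_forwards_always_use_jwt_access():
    transport_class = transports.SecurityPoliciesRestTransport
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        SecurityPoliciesClient(
            credentials=ga_credentials.AnonymousCredentials(), transport="rest"
        )
        # The generated tests assert the full keyword set passed to the
        # transport; here we only check the flag this change introduces.
        _, kwargs = patched.call_args
        assert kwargs.get("always_use_jwt_access") is True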
@@ -369,7 +359,7 @@ def test_security_policies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_security_policies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_security_policies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_security_policies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_security_policies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_security_policies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,12 @@ def test_add_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request_init["security_policy_rule_resource"] = compute.SecurityPolicyRule( + action="action_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -446,7 +442,6 @@ def test_add_rule_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -464,14 +459,13 @@ def test_add_rule_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_rule(request) @@ -482,7 +476,6 @@ def test_add_rule_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -500,18 +493,43 @@ def test_add_rule_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_rule_rest_bad_request( + transport: str = "rest", request_type=compute.AddRuleSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request_init["security_policy_rule_resource"] = compute.SecurityPolicyRule( + action="action_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_rule(request) + + def test_add_rule_rest_from_dict(): test_add_rule_rest(request_type=dict) -def test_add_rule_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_rule_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -519,40 +537,42 @@ def test_add_rule_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - security_policy_rule_resource = compute.SecurityPolicyRule( - action="action_value" - ) - client.add_rule( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "security_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", security_policy="security_policy_value", - security_policy_rule_resource=security_policy_rule_resource, + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) + mock_args.update(sample_request) + client.add_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "security_policy_value" in http_call[1] + str(body) + str(params) - assert compute.SecurityPolicyRule.to_json( - security_policy_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_rule_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/addRule" + % client.transport._host, + args[1], + ) + + +def test_add_rule_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -574,9 +594,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -586,7 +606,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -604,14 +623,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -622,7 +640,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -640,18 +657,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -659,30 +698,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "security_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", security_policy="security_policy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "security_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -701,72 +748,73 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ), - advanced_options_config=compute.SecurityPolicyAdvancedOptionsConfig( - json_parsing=compute.SecurityPolicyAdvancedOptionsConfig.JsonParsing.DISABLED - ), creation_timestamp="creation_timestamp_value", description="description_value", fingerprint="fingerprint_value", id=205, kind="kind_value", name="name_value", - rules=[compute.SecurityPolicyRule(action="action_value")], self_link="self_link_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.SecurityPolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SecurityPolicy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.SecurityPolicy) - assert ( - response.adaptive_protection_config - == compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ) - assert ( - response.advanced_options_config - == compute.SecurityPolicyAdvancedOptionsConfig( - json_parsing=compute.SecurityPolicyAdvancedOptionsConfig.JsonParsing.DISABLED - ) - ) assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.fingerprint == "fingerprint_value" assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.rules == [compute.SecurityPolicyRule(action="action_value")] assert response.self_link == "self_link_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -774,30 +822,38 @@ def test_get_rest_flattened(): return_value = compute.SecurityPolicy() # Wrap the value into a proper Response obj - json_return_value = compute.SecurityPolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SecurityPolicy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "security_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", security_policy="security_policy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "security_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -816,9 +872,9 @@ def test_get_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -827,19 +883,14 @@ def test_get_rule_rest( action="action_value", description="description_value", kind="kind_value", - match=compute.SecurityPolicyRuleMatcher( - config=compute.SecurityPolicyRuleMatcherConfig( - src_ip_ranges=["src_ip_ranges_value"] - ) - ), preview=True, priority=898, ) # Wrap the value into a proper Response obj - json_return_value = compute.SecurityPolicyRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SecurityPolicyRule.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_rule(request) @@ -849,21 +900,41 @@ def test_get_rule_rest( assert response.action == "action_value" assert response.description == "description_value" assert response.kind == "kind_value" - assert response.match == compute.SecurityPolicyRuleMatcher( - config=compute.SecurityPolicyRuleMatcherConfig( - src_ip_ranges=["src_ip_ranges_value"] - ) - ) assert response.preview is True assert response.priority == 898 +def test_get_rule_rest_bad_request( + transport: str = "rest", request_type=compute.GetRuleSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_rule(request) + + def test_get_rule_rest_from_dict(): test_get_rule_rest(request_type=dict) -def test_get_rule_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rule_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -871,30 +942,38 @@ def test_get_rule_rest_flattened(): return_value = compute.SecurityPolicyRule() # Wrap the value into a proper Response obj - json_return_value = compute.SecurityPolicyRule.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SecurityPolicyRule.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_rule( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "security_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", security_policy="security_policy_value", ) + mock_args.update(sample_request) + client.get_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "security_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/getRule" + % client.transport._host, + args[1], + ) -def test_get_rule_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rule_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -913,9 +992,16 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["security_policy_resource"] = compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -925,7 +1011,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -943,14 +1028,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -961,7 +1045,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -979,18 +1062,47 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["security_policy_resource"] = compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -998,41 +1110,45 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - security_policy_resource = compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) ) - ) - ) - client.insert( - project="project_value", security_policy_resource=security_policy_resource, + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.SecurityPolicy.to_json( - security_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1057,33 +1173,21 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.SecurityPolicyList( - id="id_value", - items=[ - compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ) - ], - kind="kind_value", - next_page_token="next_page_token_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), + id="id_value", kind="kind_value", next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.SecurityPolicyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SecurityPolicyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1091,26 +1195,41 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListSecurityPoliciesRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1118,27 +1237,36 @@ def test_list_rest_flattened(): return_value = compute.SecurityPolicyList() # Wrap the value into a proper Response obj - json_return_value = compute.SecurityPolicyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SecurityPolicyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1148,11 +1276,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SecurityPolicyList( @@ -1182,16 +1312,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.SecurityPolicy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1204,29 +1333,21 @@ def test_list_preconfigured_expression_sets_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse( - preconfigured_expression_sets=compute.SecurityPoliciesWafConfig( - waf_rules=compute.PreconfiguredWafSet( - expression_sets=[ - compute.WafExpressionSet(aliases=["aliases_value"]) - ] - ) - ), - ) + return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_preconfigured_expression_sets(request) @@ -1235,19 +1356,40 @@ def test_list_preconfigured_expression_sets_rest( assert isinstance( response, compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse ) - assert response.preconfigured_expression_sets == compute.SecurityPoliciesWafConfig( - waf_rules=compute.PreconfiguredWafSet( - expression_sets=[compute.WafExpressionSet(aliases=["aliases_value"])] - ) + + +def test_list_preconfigured_expression_sets_rest_bad_request( + transport: str = "rest", + request_type=compute.ListPreconfiguredExpressionSetsSecurityPoliciesRequest, +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_preconfigured_expression_sets(request) + def test_list_preconfigured_expression_sets_rest_from_dict(): test_list_preconfigured_expression_sets_rest(request_type=dict) -def test_list_preconfigured_expression_sets_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_preconfigured_expression_sets_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1255,29 +1397,40 @@ def test_list_preconfigured_expression_sets_rest_flattened(): return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.SecurityPoliciesListPreconfiguredExpressionSetsResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_preconfigured_expression_sets(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list_preconfigured_expression_sets(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/listPreconfiguredExpressionSets" + % client.transport._host, + args[1], + ) -def test_list_preconfigured_expression_sets_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_preconfigured_expression_sets_rest_flattened_error( + transport: str = "rest", +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1295,9 +1448,16 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request_init["security_policy_resource"] = compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1307,7 +1467,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1325,14 +1484,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1343,7 +1501,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1361,18 +1518,47 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request_init["security_policy_resource"] = compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1380,44 +1566,46 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - security_policy_resource = compute.SecurityPolicy( - adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( - layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( - enable=True - ) - ) - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "security_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", security_policy="security_policy_value", - security_policy_resource=security_policy_resource, + security_policy_resource=compute.SecurityPolicy( + adaptive_protection_config=compute.SecurityPolicyAdaptiveProtectionConfig( + layer7_ddos_defense_config=compute.SecurityPolicyAdaptiveProtectionConfigLayer7DdosDefenseConfig( + enable=True + ) + ) + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "security_policy_value" in http_call[1] + str(body) + str(params) - assert compute.SecurityPolicy.to_json( - security_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1443,9 +1631,12 @@ def test_patch_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request_init["security_policy_rule_resource"] = compute.SecurityPolicyRule( + action="action_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1455,7 +1646,6 @@ def test_patch_rule_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1473,14 +1663,13 @@ def test_patch_rule_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch_rule(request) @@ -1491,7 +1680,6 @@ def test_patch_rule_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1509,18 +1697,43 @@ def test_patch_rule_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rule_rest_bad_request( + transport: str = "rest", request_type=compute.PatchRuleSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request_init["security_policy_rule_resource"] = compute.SecurityPolicyRule( + action="action_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch_rule(request) + + def test_patch_rule_rest_from_dict(): test_patch_rule_rest(request_type=dict) -def test_patch_rule_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rule_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1528,40 +1741,42 @@ def test_patch_rule_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - security_policy_rule_resource = compute.SecurityPolicyRule( - action="action_value" - ) - client.patch_rule( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "security_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", security_policy="security_policy_value", - security_policy_rule_resource=security_policy_rule_resource, + security_policy_rule_resource=compute.SecurityPolicyRule( + action="action_value" + ), ) + mock_args.update(sample_request) + client.patch_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "security_policy_value" in http_call[1] + str(body) + str(params) - assert compute.SecurityPolicyRule.to_json( - security_policy_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rule_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/patchRule" + % client.transport._host, + args[1], + ) + + +def test_patch_rule_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1583,9 +1798,9 @@ def test_remove_rule_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1595,7 +1810,6 @@ def test_remove_rule_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1613,14 +1827,13 @@ def test_remove_rule_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_rule(request) @@ -1631,7 +1844,6 @@ def test_remove_rule_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1649,18 +1861,40 @@ def test_remove_rule_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_rule_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveRuleSecurityPolicyRequest +): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "security_policy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_rule(request) + + def test_remove_rule_rest_from_dict(): test_remove_rule_rest(request_type=dict) -def test_remove_rule_rest_flattened(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_rule_rest_flattened(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1668,30 +1902,38 @@ def test_remove_rule_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.remove_rule( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "security_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", security_policy="security_policy_value", ) + mock_args.update(sample_request) + client.remove_rule(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "security_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/securityPolicies/{security_policy}/removeRule" + % client.transport._host, + args[1], + ) -def test_remove_rule_rest_flattened_error(): - client = SecurityPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_rule_rest_flattened_error(transport: str = "rest"): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1788,8 +2030,10 @@ def test_security_policies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_security_policies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1813,29 +2057,6 @@ def test_security_policies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_security_policies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.security_policies.transports.SecurityPoliciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SecurityPoliciesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_security_policies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1847,7 +2068,6 @@ def test_security_policies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_security_policies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1863,21 +2083,6 @@ def test_security_policies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_security_policies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SecurityPoliciesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_security_policies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2024,3 +2229,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = SecurityPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_service_attachments.py b/tests/unit/gapic/compute_v1/test_service_attachments.py index 34da9328a..f35f78bbb 100644 --- a/tests/unit/gapic/compute_v1/test_service_attachments.py +++ b/tests/unit/gapic/compute_v1/test_service_attachments.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.service_attachments import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.service_attachments import pagers from google.cloud.compute_v1.services.service_attachments import transports -from google.cloud.compute_v1.services.service_attachments.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -198,7 +182,7 @@ def test_service_attachments_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -207,6 +191,7 @@ def test_service_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,7 +199,7 @@ def test_service_attachments_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -223,6 +208,7 @@ def test_service_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -230,7 +216,7 @@ def test_service_attachments_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -239,6 +225,7 @@ def test_service_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -258,7 +245,7 @@ def test_service_attachments_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -267,6 +254,7 @@ def test_service_attachments_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -309,7 +297,7 @@ def test_service_attachments_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = 
None @@ -326,6 +314,7 @@ def test_service_attachments_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -350,7 +339,7 @@ def test_service_attachments_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +348,7 @@ def test_service_attachments_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -371,7 +361,7 @@ def test_service_attachments_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -380,6 +370,7 @@ def test_service_attachments_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -394,7 +385,7 @@ def test_service_attachments_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -403,6 +394,7 @@ def test_service_attachments_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -417,7 +409,7 @@ def test_service_attachments_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -426,6 +418,7 @@ def test_service_attachments_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -437,41 +430,27 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.ServiceAttachmentAggregatedList( id="id_value", - items={ - "key_value": compute.ServiceAttachmentsScopedList( - service_attachments=[ - compute.ServiceAttachment( - connected_endpoints=[ - compute.ServiceAttachmentConnectedEndpoint( - endpoint="endpoint_value" - ) - ] - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.ServiceAttachmentAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -479,33 +458,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.ServiceAttachmentsScopedList( - service_attachments=[ - compute.ServiceAttachment( - connected_endpoints=[ - compute.ServiceAttachmentConnectedEndpoint( - endpoint="endpoint_value" - ) - ] - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", + request_type=compute.AggregatedListServiceAttachmentsRequest, +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): +def test_aggregated_list_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -514,30 +503,37 @@ def test_aggregated_list_rest_flattened(): return_value = compute.ServiceAttachmentAggregatedList() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.ServiceAttachmentAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/serviceAttachments" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -548,13 +544,15 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = ServiceAttachmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ServiceAttachmentAggregatedList( @@ -590,10 +588,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.ServiceAttachmentsScopedList) assert pager.get("h") is None @@ -611,7 +608,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.ServiceAttachmentsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -623,9 +620,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -635,7 +636,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -653,14 +653,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -671,7 +670,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -689,19 +687,43 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteServiceAttachmentRequest +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -710,34 +732,43 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", service_attachment="service_attachment_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "service_attachment_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -758,21 +789,19 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ServiceAttachment( - connected_endpoints=[ - compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") - ], connection_preference=compute.ServiceAttachment.ConnectionPreference.ACCEPT_AUTOMATIC, - consumer_accept_lists=[ - compute.ServiceAttachmentConsumerProjectLimit(connection_limit=1710) - ], consumer_reject_lists=["consumer_reject_lists_value"], creation_timestamp="creation_timestamp_value", description="description_value", @@ -783,32 +812,25 @@ def test_get_rest( name="name_value", nat_subnets=["nat_subnets_value"], producer_forwarding_rule="producer_forwarding_rule_value", - psc_service_attachment_id=compute.Uint128(high=416), region="region_value", self_link="self_link_value", target_service="target_service_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.ServiceAttachment.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ServiceAttachment.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.ServiceAttachment) - assert response.connected_endpoints == [ - compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") - ] assert ( response.connection_preference == compute.ServiceAttachment.ConnectionPreference.ACCEPT_AUTOMATIC ) - assert response.consumer_accept_lists == [ - compute.ServiceAttachmentConsumerProjectLimit(connection_limit=1710) - ] assert response.consumer_reject_lists == ["consumer_reject_lists_value"] assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" @@ -819,19 +841,45 @@ def test_get_rest( assert response.name == "name_value" assert response.nat_subnets == ["nat_subnets_value"] assert response.producer_forwarding_rule == "producer_forwarding_rule_value" - assert response.psc_service_attachment_id == compute.Uint128(high=416) assert response.region == "region_value" assert response.self_link == "self_link_value" assert response.target_service == "target_service_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetServiceAttachmentRequest +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -840,34 +888,43 @@ def test_get_rest_flattened(): return_value = compute.ServiceAttachment() # Wrap the value into a proper Response obj - json_return_value = compute.ServiceAttachment.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ServiceAttachment.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", service_attachment="service_attachment_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "service_attachment_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -888,61 +945,60 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicyServiceAttachmentRequest +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): +def test_get_iam_policy_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -951,32 +1007,41 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -997,9 +1062,14 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["service_attachment_resource"] = compute.ServiceAttachment( + connected_endpoints=[ + compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1009,7 +1079,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1027,14 +1096,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1045,7 +1113,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1063,19 +1130,44 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertServiceAttachmentRequest +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["service_attachment_resource"] = compute.ServiceAttachment( + connected_endpoints=[ + compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1084,43 +1176,45 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- service_attachment_resource = compute.ServiceAttachment( - connected_endpoints=[ - compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") - ] - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - service_attachment_resource=service_attachment_resource, + service_attachment_resource=compute.ServiceAttachment( + connected_endpoints=[ + compute.ServiceAttachmentConnectedEndpoint( + endpoint="endpoint_value" + ) + ] + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.ServiceAttachment.to_json( - service_attachment_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1147,34 +1241,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ServiceAttachmentList( id="id_value", - items=[ - compute.ServiceAttachment( - connected_endpoints=[ - compute.ServiceAttachmentConnectedEndpoint( - endpoint="endpoint_value" - ) - ] - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ServiceAttachmentList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ServiceAttachmentList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1182,26 +1266,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.ServiceAttachment( - connected_endpoints=[ - compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") - ] - ) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListServiceAttachmentsRequest +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1210,31 +1309,35 @@ def test_list_rest_flattened(): return_value = compute.ServiceAttachmentList() # Wrap the value into a proper Response obj - json_return_value = compute.ServiceAttachmentList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ServiceAttachmentList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1247,13 +1350,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ServiceAttachmentsClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ServiceAttachmentList( @@ -1283,16 +1388,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.ServiceAttachment) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1304,9 +1408,18 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + request_init["service_attachment_resource"] = compute.ServiceAttachment( + connected_endpoints=[ + compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") + ] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1316,7 +1429,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1334,14 +1446,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1352,7 +1463,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1370,19 +1480,48 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchServiceAttachmentRequest +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + request_init["service_attachment_resource"] = compute.ServiceAttachment( + connected_endpoints=[ + compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") + ] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1391,45 +1530,50 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - service_attachment_resource = compute.ServiceAttachment( - connected_endpoints=[ - compute.ServiceAttachmentConnectedEndpoint(endpoint="endpoint_value") - ] - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "service_attachment": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", service_attachment="service_attachment_value", - service_attachment_resource=service_attachment_resource, + service_attachment_resource=compute.ServiceAttachment( + connected_endpoints=[ + compute.ServiceAttachmentConnectedEndpoint( + endpoint="endpoint_value" + ) + ] + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "service_attachment_value" in http_call[1] + str(body) + str(params) - assert compute.ServiceAttachment.to_json( - service_attachment_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{service_attachment}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1457,61 +1601,66 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicyServiceAttachmentRequest +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): +def test_set_iam_policy_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1520,43 +1669,46 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- region_set_policy_request_resource = compute.RegionSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_policy_request_resource=region_set_policy_request_resource, + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetPolicyRequest.to_json( - region_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1581,9 +1733,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1593,9 +1748,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1605,13 +1760,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", + request_type=compute.TestIamPermissionsServiceAttachmentRequest, +): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1620,43 +1802,46 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/serviceAttachments/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): client = ServiceAttachmentsClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1759,8 +1944,10 @@ def test_service_attachments_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_service_attachments_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1784,29 +1971,6 @@ def test_service_attachments_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_service_attachments_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.service_attachments.transports.ServiceAttachmentsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ServiceAttachmentsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_service_attachments_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1818,7 +1982,6 @@ def test_service_attachments_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_service_attachments_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1834,21 +1997,6 @@ def test_service_attachments_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_service_attachments_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ServiceAttachmentsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_service_attachments_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1995,3 +2143,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ServiceAttachmentsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_snapshots.py b/tests/unit/gapic/compute_v1/test_snapshots.py index 7f9b403e2..c48f05cf5 100644 --- a/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/tests/unit/gapic/compute_v1/test_snapshots.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.snapshots import SnapshotsClient from google.cloud.compute_v1.services.snapshots import pagers from google.cloud.compute_v1.services.snapshots import transports -from google.cloud.compute_v1.services.snapshots.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -185,7 +169,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -194,6 +178,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -201,7 +186,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -210,6 +195,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -217,7 +203,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -226,6 +212,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -245,7 +232,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -254,6 +241,7 @@ def test_snapshots_client_client_options(client_class, transport_class, transpor client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -284,7 +272,7 @@ def test_snapshots_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as 
patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -301,6 +289,7 @@ def test_snapshots_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -325,7 +314,7 @@ def test_snapshots_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -334,6 +323,7 @@ def test_snapshots_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -346,7 +336,7 @@ def test_snapshots_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -355,6 +345,7 @@ def test_snapshots_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -369,7 +360,7 @@ def test_snapshots_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +369,7 @@ def test_snapshots_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +384,7 @@ def test_snapshots_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -401,6 +393,7 @@ def test_snapshots_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -411,9 +404,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -423,7 +416,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -441,14 +433,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -459,7 +450,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -477,18 +467,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteSnapshotRequest +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -496,30 +508,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", snapshot="snapshot_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "snapshot": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", snapshot="snapshot_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "snapshot_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/snapshots/{snapshot}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -536,9 +554,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSnapshotReque credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -553,20 +571,13 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSnapshotReque id=205, kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, license_codes=[1360], licenses=["licenses_value"], location_hint="location_hint_value", name="name_value", satisfies_pzs=True, self_link="self_link_value", - snapshot_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_disk="source_disk_value", - source_disk_encryption_key=compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ), source_disk_id="source_disk_id_value", status=compute.Snapshot.Status.CREATING, storage_bytes=1403, @@ -575,9 +586,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSnapshotReque ) # Wrap the value into a proper Response obj - json_return_value = compute.Snapshot.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Snapshot.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -593,20 +604,13 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSnapshotReque assert response.id == 205 assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.license_codes == [1360] assert response.licenses == ["licenses_value"] assert response.location_hint == "location_hint_value" assert response.name == "name_value" assert response.satisfies_pzs is True assert response.self_link == "self_link_value" - assert response.snapshot_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_disk == "source_disk_value" - assert response.source_disk_encryption_key == compute.CustomerEncryptionKey( - kms_key_name="kms_key_name_value" - ) assert response.source_disk_id == "source_disk_id_value" assert response.status == compute.Snapshot.Status.CREATING assert response.storage_bytes == 1403 @@ -614,12 +618,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSnapshotReque assert response.storage_locations == ["storage_locations_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetSnapshotRequest +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -627,30 +656,36 @@ def test_get_rest_flattened(): return_value = compute.Snapshot() # Wrap the value into a proper Response obj - json_return_value = compute.Snapshot.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Snapshot.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", snapshot="snapshot_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "snapshot": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", snapshot="snapshot_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "snapshot_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/snapshots/{snapshot}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -669,60 +704,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicySnapshotRequest +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -730,30 +766,36 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_iam_policy( - project="project_value", resource="resource_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", resource="resource_value",) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/snapshots/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -770,26 +812,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListSnapshotsRe credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.SnapshotList( id="id_value", - items=[compute.Snapshot(auto_created=True)], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.SnapshotList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SnapshotList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -797,19 +837,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListSnapshotsRe # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.Snapshot(auto_created=True)] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListSnapshotsRequest +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -817,27 +880,36 @@ def test_list_rest_flattened(): return_value = compute.SnapshotList() # Wrap the value into a proper Response obj - json_return_value = compute.SnapshotList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SnapshotList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/snapshots" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -847,11 +919,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SnapshotList( @@ -873,16 +947,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Snapshot) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -894,60 +967,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicySnapshotRequest +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_policy_request_resource"] = compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -955,40 +1035,42 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_set_policy_request_resource = compute.GlobalSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_policy_request_resource=global_set_policy_request_resource, + global_set_policy_request_resource=compute.GlobalSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetPolicyRequest.to_json( - global_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/snapshots/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1010,9 +1092,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1022,7 +1107,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1040,14 +1124,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1058,7 +1141,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1076,18 +1158,43 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsSnapshotRequest +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["global_set_labels_request_resource"] = compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_labels_rest_flattened(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1095,40 +1202,42 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - global_set_labels_request_resource = compute.GlobalSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - global_set_labels_request_resource=global_set_labels_request_resource, + global_set_labels_request_resource=compute.GlobalSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.GlobalSetLabelsRequest.to_json( - global_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/snapshots/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1150,9 +1259,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1162,9 +1274,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1174,12 +1286,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsSnapshotRequest +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "resource": "sample2"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1187,40 +1327,42 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "resource": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = SnapshotsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/snapshots/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1317,8 +1459,10 @@ def test_snapshots_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_snapshots_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1342,29 +1486,6 @@ def test_snapshots_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_snapshots_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.snapshots.transports.SnapshotsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SnapshotsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_snapshots_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1376,7 +1497,6 @@ def test_snapshots_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_snapshots_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1392,21 +1512,6 @@ def test_snapshots_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_snapshots_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SnapshotsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_snapshots_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1553,3 +1658,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_ssl_certificates.py b/tests/unit/gapic/compute_v1/test_ssl_certificates.py index f1673c7e1..b0885680d 100644 --- a/tests/unit/gapic/compute_v1/test_ssl_certificates.py +++ b/tests/unit/gapic/compute_v1/test_ssl_certificates.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.ssl_certificates import SslCertificatesClient from google.cloud.compute_v1.services.ssl_certificates import pagers from google.cloud.compute_v1.services.ssl_certificates import transports -from google.cloud.compute_v1.services.ssl_certificates.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_ssl_certificates_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_ssl_certificates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_ssl_certificates_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_ssl_certificates_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_ssl_certificates_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -306,7 +294,7 @@ def test_ssl_certificates_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -323,6 +311,7 @@ def 
test_ssl_certificates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -347,7 +336,7 @@ def test_ssl_certificates_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -356,6 +345,7 @@ def test_ssl_certificates_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -368,7 +358,7 @@ def test_ssl_certificates_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +367,7 @@ def test_ssl_certificates_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +382,7 @@ def test_ssl_certificates_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -400,6 +391,7 @@ def test_ssl_certificates_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -414,7 +406,7 @@ def test_ssl_certificates_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -423,6 +415,7 @@ def test_ssl_certificates_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -433,33 +426,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.SslCertificateAggregatedList( id="id_value", - items={ - "key_value": compute.SslCertificatesScopedList( - ssl_certificates=[ - compute.SslCertificate(certificate="certificate_value") - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificateAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificateAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -467,24 +452,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.SslCertificatesScopedList( - ssl_certificates=[compute.SslCertificate(certificate="certificate_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListSslCertificatesRequest +): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -492,27 +496,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.SslCertificateAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificateAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificateAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/sslCertificates" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -522,11 +535,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SslCertificateAggregatedList( @@ -562,10 +577,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.SslCertificatesScopedList) assert pager.get("h") is None @@ -583,7 +597,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.SslCertificatesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -595,9 +609,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_certificate": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -607,7 +621,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -625,14 +638,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -643,7 +655,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -661,18 +672,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteSslCertificateRequest +): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_certificate": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -680,30 +713,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "ssl_certificate": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", ssl_certificate="ssl_certificate_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "ssl_certificate_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -722,9 +763,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_certificate": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -736,24 +777,18 @@ def test_get_rest( expire_time="expire_time_value", id=205, kind="kind_value", - managed=compute.SslCertificateManagedSslCertificate( - domain_status={"key_value": "value_value"} - ), name="name_value", private_key="private_key_value", region="region_value", self_link="self_link_value", - self_managed=compute.SslCertificateSelfManagedSslCertificate( - certificate="certificate_value" - ), subject_alternative_names=["subject_alternative_names_value"], type_=compute.SslCertificate.Type.MANAGED, ) # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificate.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -766,26 +801,45 @@ def test_get_rest( assert response.expire_time == "expire_time_value" assert response.id == 205 assert response.kind == "kind_value" - assert response.managed == compute.SslCertificateManagedSslCertificate( - domain_status={"key_value": "value_value"} - ) assert response.name == "name_value" assert response.private_key == "private_key_value" assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.self_managed == compute.SslCertificateSelfManagedSslCertificate( - certificate="certificate_value" - ) assert response.subject_alternative_names == ["subject_alternative_names_value"] assert response.type_ == compute.SslCertificate.Type.MANAGED +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetSslCertificateRequest +): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_certificate": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -793,30 +847,38 @@ def test_get_rest_flattened(): return_value = compute.SslCertificate() # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificate.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificate.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "ssl_certificate": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", ssl_certificate="ssl_certificate_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "ssl_certificate_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslCertificates/{ssl_certificate}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -835,9 +897,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["ssl_certificate_resource"] = compute.SslCertificate( + certificate="certificate_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -847,7 +912,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -865,14 +929,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -883,7 +946,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -901,18 +963,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertSslCertificateRequest +): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["ssl_certificate_resource"] = compute.SslCertificate( + certificate="certificate_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -920,37 +1007,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- ssl_certificate_resource = compute.SslCertificate( - certificate="certificate_value" - ) - client.insert( - project="project_value", ssl_certificate_resource=ssl_certificate_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ssl_certificate_resource=compute.SslCertificate( + certificate="certificate_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.SslCertificate.to_json( - ssl_certificate_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslCertificates" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -971,26 +1062,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.SslCertificateList( id="id_value", - items=[compute.SslCertificate(certificate="certificate_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificateList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -998,19 +1087,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.SslCertificate(certificate="certificate_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListSslCertificatesRequest +): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1018,27 +1130,36 @@ def test_list_rest_flattened(): return_value = compute.SslCertificateList() # Wrap the value into a proper Response obj - json_return_value = compute.SslCertificateList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslCertificateList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslCertificates" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1048,11 +1169,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = SslCertificatesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SslCertificateList( @@ -1082,16 +1205,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.SslCertificate) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1176,8 +1298,10 @@ def test_ssl_certificates_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_ssl_certificates_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1201,29 +1325,6 @@ def test_ssl_certificates_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_ssl_certificates_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.ssl_certificates.transports.SslCertificatesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SslCertificatesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_ssl_certificates_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1235,7 +1336,6 @@ def test_ssl_certificates_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_ssl_certificates_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1251,21 +1351,6 @@ def test_ssl_certificates_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_ssl_certificates_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SslCertificatesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_ssl_certificates_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1412,3 +1497,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = SslCertificatesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_ssl_policies.py b/tests/unit/gapic/compute_v1/test_ssl_policies.py index b92d387e0..eb953914e 100644 --- a/tests/unit/gapic/compute_v1/test_ssl_policies.py +++ b/tests/unit/gapic/compute_v1/test_ssl_policies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.ssl_policies import SslPoliciesClient from google.cloud.compute_v1.services.ssl_policies import pagers from google.cloud.compute_v1.services.ssl_policies import transports -from google.cloud.compute_v1.services.ssl_policies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_ssl_policies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_ssl_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_ssl_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_ssl_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_ssl_policies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_ssl_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_ssl_policies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_ssl_policies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_ssl_policies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def 
test_ssl_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_ssl_policies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_ssl_policies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_ssl_policies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_ssl_policies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_ssl_policies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_ssl_policies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_ssl_policies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_ssl_policies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,9 +408,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_policy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -427,7 +420,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -445,14 +437,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -463,7 +454,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -481,18 +471,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteSslPolicyRequest +): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_policy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -500,30 +512,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( - project="project_value", ssl_policy="ssl_policy_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "ssl_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", ssl_policy="ssl_policy_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "ssl_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -540,9 +558,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSslPolicyRequ credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_policy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -559,13 +577,12 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSslPolicyRequ name="name_value", profile=compute.SslPolicy.Profile.COMPATIBLE, self_link="self_link_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], ) # Wrap the value into a proper Response obj - json_return_value = compute.SslPolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslPolicy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -583,17 +600,39 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSslPolicyRequ assert response.name == "name_value" assert response.profile == compute.SslPolicy.Profile.COMPATIBLE assert response.self_link == "self_link_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetSslPolicyRequest +): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_policy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -601,30 +640,36 @@ def test_get_rest_flattened(): return_value = compute.SslPolicy() # Wrap the value into a proper Response obj - json_return_value = compute.SslPolicy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslPolicy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", ssl_policy="ssl_policy_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "ssl_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", ssl_policy="ssl_policy_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "ssl_policy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -643,9 +688,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["ssl_policy_resource"] = compute.SslPolicy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -655,7 +703,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -673,14 +720,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -691,7 +737,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -709,18 +754,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertSslPolicyRequest +): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["ssl_policy_resource"] = compute.SslPolicy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -728,37 +798,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- ssl_policy_resource = compute.SslPolicy( - creation_timestamp="creation_timestamp_value" - ) - client.insert( - project="project_value", ssl_policy_resource=ssl_policy_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ssl_policy_resource=compute.SslPolicy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.SslPolicy.to_json( - ssl_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslPolicies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -779,26 +853,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.SslPoliciesList( id="id_value", - items=[compute.SslPolicy(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.SslPoliciesList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslPoliciesList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -806,21 +878,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.SslPolicy(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListSslPoliciesRequest +): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -828,27 +921,36 @@ def test_list_rest_flattened(): return_value = compute.SslPoliciesList() # Wrap the value into a proper Response obj - json_return_value = compute.SslPoliciesList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SslPoliciesList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslPolicies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -858,11 +960,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SslPoliciesList( @@ -886,16 +990,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.SslPolicy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -908,9 +1011,9 @@ def test_list_available_features_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -920,11 +1023,11 @@ def test_list_available_features_rest( ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.SslPoliciesListAvailableFeaturesResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_available_features(request) @@ -934,12 +1037,38 @@ def test_list_available_features_rest( assert response.features == ["features_value"] +def test_list_available_features_rest_bad_request( + transport: str = "rest", + request_type=compute.ListAvailableFeaturesSslPoliciesRequest, +): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_available_features(request) + + def test_list_available_features_rest_from_dict(): test_list_available_features_rest(request_type=dict) -def test_list_available_features_rest_flattened(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_available_features_rest_flattened(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -947,29 +1076,38 @@ def test_list_available_features_rest_flattened(): return_value = compute.SslPoliciesListAvailableFeaturesResponse() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.SslPoliciesListAvailableFeaturesResponse.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_available_features(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list_available_features(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslPolicies/listAvailableFeatures" + % client.transport._host, + args[1], + ) -def test_list_available_features_rest_flattened_error(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_available_features_rest_flattened_error(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -986,9 +1124,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_policy": "sample2"} + request_init["ssl_policy_resource"] = compute.SslPolicy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -998,7 +1139,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1016,14 +1156,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1034,7 +1173,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1052,18 +1190,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchSslPolicyRequest +): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "ssl_policy": "sample2"} + request_init["ssl_policy_resource"] = compute.SslPolicy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1071,40 +1234,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- ssl_policy_resource = compute.SslPolicy( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "ssl_policy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", ssl_policy="ssl_policy_value", - ssl_policy_resource=ssl_policy_resource, + ssl_policy_resource=compute.SslPolicy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "ssl_policy_value" in http_call[1] + str(body) + str(params) - assert compute.SslPolicy.to_json( - ssl_policy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = SslPoliciesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/sslPolicies/{ssl_policy}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1200,8 +1365,10 @@ def test_ssl_policies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_ssl_policies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1225,29 +1392,6 @@ def test_ssl_policies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_ssl_policies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.ssl_policies.transports.SslPoliciesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SslPoliciesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_ssl_policies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1259,7 +1403,6 @@ def test_ssl_policies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_ssl_policies_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1275,21 +1418,6 @@ def test_ssl_policies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_ssl_policies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SslPoliciesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_ssl_policies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1436,3 +1564,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = SslPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_subnetworks.py b/tests/unit/gapic/compute_v1/test_subnetworks.py index a5c494835..66517e7c7 100644 --- a/tests/unit/gapic/compute_v1/test_subnetworks.py +++ b/tests/unit/gapic/compute_v1/test_subnetworks.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.subnetworks import SubnetworksClient from google.cloud.compute_v1.services.subnetworks import pagers from google.cloud.compute_v1.services.subnetworks import transports -from google.cloud.compute_v1.services.subnetworks.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_subnetworks_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_subnetworks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_subnetworks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_subnetworks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_subnetworks_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_subnetworks_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_subnetworks_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_subnetworks_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_subnetworks_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def test_subnetworks_client_mtls_env_auto( 
client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_subnetworks_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_subnetworks_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_subnetworks_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_subnetworks_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_subnetworks_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_subnetworks_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_subnetworks_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_subnetworks_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,35 +408,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.SubnetworkAggregatedList( id="id_value", - items={ - "key_value": compute.SubnetworksScopedList( - subnetworks=[ - compute.Subnetwork( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.SubnetworkAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SubnetworkAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -451,26 +434,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.SubnetworksScopedList( - subnetworks=[ - compute.Subnetwork(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListSubnetworksRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -478,27 +478,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.SubnetworkAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.SubnetworkAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SubnetworkAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/subnetworks" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -508,11 +517,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SubnetworkAggregatedList( @@ -545,10 +556,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.SubnetworksScopedList) assert pager.get("h") is None @@ -566,7 +576,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.SubnetworksScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -578,9 +588,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -590,7 +600,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -608,14 +617,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -626,7 +634,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -644,18 +651,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteSubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -663,33 +692,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "subnetwork": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", subnetwork="subnetwork_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "subnetwork_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -709,9 +749,12 @@ def test_expand_ip_cidr_range_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ] = compute.SubnetworksExpandIpCidrRangeRequest(ip_cidr_range="ip_cidr_range_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -721,7 +764,6 @@ def test_expand_ip_cidr_range_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -739,14 +781,13 @@ def test_expand_ip_cidr_range_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.expand_ip_cidr_range(request) @@ -757,7 +798,6 @@ def test_expand_ip_cidr_range_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -775,18 +815,43 @@ def test_expand_ip_cidr_range_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_expand_ip_cidr_range_rest_bad_request( + transport: str = "rest", request_type=compute.ExpandIpCidrRangeSubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request_init[ + "subnetworks_expand_ip_cidr_range_request_resource" + ] = compute.SubnetworksExpandIpCidrRangeRequest(ip_cidr_range="ip_cidr_range_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.expand_ip_cidr_range(request) + + def test_expand_ip_cidr_range_rest_from_dict(): test_expand_ip_cidr_range_rest(request_type=dict) -def test_expand_ip_cidr_range_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_expand_ip_cidr_range_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -794,42 +859,47 @@ def test_expand_ip_cidr_range_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - subnetworks_expand_ip_cidr_range_request_resource = compute.SubnetworksExpandIpCidrRangeRequest( - ip_cidr_range="ip_cidr_range_value" - ) - client.expand_ip_cidr_range( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "subnetwork": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", subnetwork="subnetwork_value", - subnetworks_expand_ip_cidr_range_request_resource=subnetworks_expand_ip_cidr_range_request_resource, + subnetworks_expand_ip_cidr_range_request_resource=compute.SubnetworksExpandIpCidrRangeRequest( + ip_cidr_range="ip_cidr_range_value" + ), ) + mock_args.update(sample_request) + client.expand_ip_cidr_range(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "subnetwork_value" in http_call[1] + str(body) + str(params) - assert compute.SubnetworksExpandIpCidrRangeRequest.to_json( - subnetworks_expand_ip_cidr_range_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_expand_ip_cidr_range_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/expandIpCidrRange" + % client.transport._host, + args[1], + ) + + +def test_expand_ip_cidr_range_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -850,9 +920,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSubnetworkReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -869,9 +939,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSubnetworkReq ipv6_access_type=compute.Subnetwork.Ipv6AccessType.EXTERNAL, ipv6_cidr_range="ipv6_cidr_range_value", kind="kind_value", - log_config=compute.SubnetworkLogConfig( - aggregation_interval=compute.SubnetworkLogConfig.AggregationInterval.INTERVAL_10_MIN - ), name="name_value", network="network_value", private_ip_google_access=True, @@ -879,18 +946,15 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSubnetworkReq purpose=compute.Subnetwork.Purpose.INTERNAL_HTTPS_LOAD_BALANCER, region="region_value", role=compute.Subnetwork.Role.ACTIVE, - secondary_ip_ranges=[ - compute.SubnetworkSecondaryRange(ip_cidr_range="ip_cidr_range_value") - ], self_link="self_link_value", stack_type=compute.Subnetwork.StackType.IPV4_IPV6, state=compute.Subnetwork.State.DRAINING, ) # Wrap the value into a proper Response obj - json_return_value = compute.Subnetwork.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Subnetwork.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -908,9 +972,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSubnetworkReq assert response.ipv6_access_type == compute.Subnetwork.Ipv6AccessType.EXTERNAL assert response.ipv6_cidr_range == "ipv6_cidr_range_value" assert response.kind == "kind_value" - assert response.log_config == compute.SubnetworkLogConfig( - aggregation_interval=compute.SubnetworkLogConfig.AggregationInterval.INTERVAL_10_MIN - ) assert response.name == "name_value" assert response.network == "network_value" assert response.private_ip_google_access is True @@ -921,20 +982,42 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetSubnetworkReq assert response.purpose == compute.Subnetwork.Purpose.INTERNAL_HTTPS_LOAD_BALANCER assert response.region == "region_value" assert response.role == compute.Subnetwork.Role.ACTIVE - assert response.secondary_ip_ranges == [ - compute.SubnetworkSecondaryRange(ip_cidr_range="ip_cidr_range_value") - ] assert response.self_link == "self_link_value" assert response.stack_type == compute.Subnetwork.StackType.IPV4_IPV6 assert response.state == compute.Subnetwork.State.DRAINING +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetSubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -942,33 +1025,44 @@ def test_get_rest_flattened(): return_value = compute.Subnetwork() # Wrap the value into a proper Response obj - json_return_value = compute.Subnetwork.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Subnetwork.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "subnetwork": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", subnetwork="subnetwork_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "subnetwork_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -988,60 +1082,61 @@ def test_get_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_get_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.GetIamPolicySubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_iam_policy(request) + + def test_get_iam_policy_rest_from_dict(): test_get_iam_policy_rest(request_type=dict) -def test_get_iam_policy_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1049,31 +1144,42 @@ def test_get_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", ) + mock_args.update(sample_request) + client.get_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) -def test_get_iam_policy_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1093,9 +1199,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["subnetwork_resource"] = compute.Subnetwork( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1105,7 +1214,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1123,14 +1231,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1141,7 +1248,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1159,18 +1265,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertSubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["subnetwork_resource"] = compute.Subnetwork( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1178,40 +1309,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- subnetwork_resource = compute.Subnetwork( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - subnetwork_resource=subnetwork_resource, + subnetwork_resource=compute.Subnetwork( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.Subnetwork.to_json( - subnetwork_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1233,26 +1366,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.SubnetworkList( id="id_value", - items=[compute.Subnetwork(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.SubnetworkList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SubnetworkList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1260,21 +1391,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Subnetwork(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListSubnetworksRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1282,30 +1434,36 @@ def test_list_rest_flattened(): return_value = compute.SubnetworkList() # Wrap the value into a proper Response obj - json_return_value = compute.SubnetworkList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.SubnetworkList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1317,11 +1475,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.SubnetworkList( @@ -1351,16 +1511,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Subnetwork) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1372,28 +1531,26 @@ def test_list_usable_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.UsableSubnetworksAggregatedList( id="id_value", - items=[compute.UsableSubnetwork(ip_cidr_range="ip_cidr_range_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.UsableSubnetworksAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list_usable(request) @@ -1401,21 +1558,42 @@ def test_list_usable_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListUsablePager) assert response.id == "id_value" - assert response.items == [ - compute.UsableSubnetwork(ip_cidr_range="ip_cidr_range_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_usable_rest_bad_request( + transport: str = "rest", request_type=compute.ListUsableSubnetworksRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list_usable(request) def test_list_usable_rest_from_dict(): test_list_usable_rest(request_type=dict) -def test_list_usable_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_usable_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1423,29 +1601,38 @@ def test_list_usable_rest_flattened(): return_value = compute.UsableSubnetworksAggregatedList() # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 json_return_value = compute.UsableSubnetworksAggregatedList.to_json( return_value ) - response_value = Response() - response_value.status_code = 200 + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_usable(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list_usable(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/subnetworks/listUsable" + % client.transport._host, + args[1], + ) -def test_list_usable_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_usable_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1455,11 +1642,13 @@ def test_list_usable_rest_flattened_error(): ) -def test_list_usable_pager(): +def test_list_usable_rest_pager(): client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.UsableSubnetworksAggregatedList( @@ -1491,16 +1680,15 @@ def test_list_usable_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list_usable(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list_usable(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.UsableSubnetwork) for i in results) - pages = list(client.list_usable(request={}).pages) + pages = list(client.list_usable(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1512,9 +1700,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request_init["subnetwork_resource"] = compute.Subnetwork( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1524,7 +1715,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1542,14 +1732,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1560,7 +1749,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1578,18 +1766,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchSubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request_init["subnetwork_resource"] = compute.Subnetwork( + 
creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1597,42 +1810,47 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - subnetwork_resource = compute.Subnetwork( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "subnetwork": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", subnetwork="subnetwork_value", - subnetwork_resource=subnetwork_resource, + subnetwork_resource=compute.Subnetwork( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "subnetwork_value" in http_call[1] + str(body) + str(params) - assert compute.Subnetwork.to_json( - subnetwork_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1655,60 +1873,67 @@ def test_set_iam_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.Policy( - audit_configs=[ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig( - exempted_members=["exempted_members_value"] - ) - ] - ) - ], - bindings=[compute.Binding(binding_id="binding_id_value")], - etag="etag_value", - iam_owned=True, - rules=[compute.Rule(action=compute.Rule.Action.ALLOW)], - version=774, - ) + return_value = compute.Policy(etag="etag_value", iam_owned=True, version=774,) # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_iam_policy(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.Policy) - assert response.audit_configs == [ - compute.AuditConfig( - audit_log_configs=[ - compute.AuditLogConfig(exempted_members=["exempted_members_value"]) - ] - ) - ] - assert response.bindings == [compute.Binding(binding_id="binding_id_value")] assert response.etag == "etag_value" assert response.iam_owned is True - assert response.rules == [compute.Rule(action=compute.Rule.Action.ALLOW)] assert response.version == 774 +def test_set_iam_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetIamPolicySubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_iam_policy(request) + + def test_set_iam_policy_rest_from_dict(): test_set_iam_policy_rest(request_type=dict) -def test_set_iam_policy_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_iam_policy_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1716,42 +1941,47 @@ def test_set_iam_policy_rest_flattened(): return_value = compute.Policy() # Wrap the value into a proper Response obj - json_return_value = compute.Policy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Policy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_set_policy_request_resource = compute.RegionSetPolicyRequest( - bindings=[compute.Binding(binding_id="binding_id_value")] - ) - client.set_iam_policy( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_policy_request_resource=region_set_policy_request_resource, + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), ) + mock_args.update(sample_request) + client.set_iam_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetPolicyRequest.to_json( - region_set_policy_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_iam_policy_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1775,9 +2005,14 @@ def test_set_private_ip_google_access_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ] = compute.SubnetworksSetPrivateIpGoogleAccessRequest( + private_ip_google_access=True + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1787,7 +2022,6 @@ def test_set_private_ip_google_access_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1805,14 +2039,13 @@ def test_set_private_ip_google_access_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_private_ip_google_access(request) @@ -1823,7 +2056,6 @@ def test_set_private_ip_google_access_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1841,18 +2073,46 @@ def test_set_private_ip_google_access_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_private_ip_google_access_rest_bad_request( + transport: str = "rest", + request_type=compute.SetPrivateIpGoogleAccessSubnetworkRequest, +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "subnetwork": "sample3"} + request_init[ + "subnetworks_set_private_ip_google_access_request_resource" + ] = compute.SubnetworksSetPrivateIpGoogleAccessRequest( + private_ip_google_access=True + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_private_ip_google_access(request) + + def test_set_private_ip_google_access_rest_from_dict(): test_set_private_ip_google_access_rest(request_type=dict) -def test_set_private_ip_google_access_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_private_ip_google_access_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1860,42 +2120,47 @@ def test_set_private_ip_google_access_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - subnetworks_set_private_ip_google_access_request_resource = compute.SubnetworksSetPrivateIpGoogleAccessRequest( - private_ip_google_access=True - ) - client.set_private_ip_google_access( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "subnetwork": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", subnetwork="subnetwork_value", - subnetworks_set_private_ip_google_access_request_resource=subnetworks_set_private_ip_google_access_request_resource, + subnetworks_set_private_ip_google_access_request_resource=compute.SubnetworksSetPrivateIpGoogleAccessRequest( + private_ip_google_access=True + ), ) + mock_args.update(sample_request) + client.set_private_ip_google_access(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "subnetwork_value" in http_call[1] + str(body) + str(params) - assert compute.SubnetworksSetPrivateIpGoogleAccessRequest.to_json( - subnetworks_set_private_ip_google_access_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_private_ip_google_access_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{subnetwork}/setPrivateIpGoogleAccess" + % client.transport._host, + args[1], + ) + + +def test_set_private_ip_google_access_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1918,9 +2183,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1930,9 +2198,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1942,12 +2210,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsSubnetworkRequest +): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1955,42 +2251,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = SubnetworksClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/subnetworks/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -2093,8 +2394,10 @@ def test_subnetworks_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_subnetworks_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2118,29 +2421,6 @@ def test_subnetworks_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_subnetworks_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.subnetworks.transports.SubnetworksTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.SubnetworksTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_subnetworks_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2152,7 +2432,6 @@ def test_subnetworks_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_subnetworks_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2168,21 +2447,6 @@ def test_subnetworks_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_subnetworks_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - SubnetworksClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_subnetworks_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2329,3 +2593,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = SubnetworksClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py b/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py index 1290b6ef8..76f98bfc8 100644 --- a/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_grpc_proxies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_grpc_proxies import TargetGrpcProxiesClient from google.cloud.compute_v1.services.target_grpc_proxies import pagers from google.cloud.compute_v1.services.target_grpc_proxies import transports -from google.cloud.compute_v1.services.target_grpc_proxies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_target_grpc_proxies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_target_grpc_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_target_grpc_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_target_grpc_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_target_grpc_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_target_grpc_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_target_grpc_proxies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_target_grpc_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_target_grpc_proxies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = 
None @@ -324,6 +312,7 @@ def test_target_grpc_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_target_grpc_proxies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_target_grpc_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -369,7 +359,7 @@ def test_target_grpc_proxies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_target_grpc_proxies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_target_grpc_proxies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_target_grpc_proxies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_target_grpc_proxies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_target_grpc_proxies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -446,7 +439,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -464,14 +456,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -482,7 +473,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -500,18 +490,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetGrpcProxyRequest +): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -519,30 +531,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_grpc_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_grpc_proxy="target_grpc_proxy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_grpc_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -561,9 +581,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -582,9 +602,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetGrpcProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetGrpcProxy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -603,12 +623,37 @@ def test_get_rest( assert response.validate_for_proxyless is True +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetGrpcProxyRequest +): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -616,30 +661,38 @@ def test_get_rest_flattened(): return_value = compute.TargetGrpcProxy() # Wrap the value into a proper Response obj - json_return_value = compute.TargetGrpcProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetGrpcProxy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_grpc_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_grpc_proxy="target_grpc_proxy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_grpc_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -658,9 +711,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_grpc_proxy_resource"] = compute.TargetGrpcProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -670,7 +726,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -688,14 +743,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -706,7 +760,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -724,18 +777,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetGrpcProxyRequest +): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_grpc_proxy_resource"] = compute.TargetGrpcProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -743,38 +821,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_grpc_proxy_resource = compute.TargetGrpcProxy( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - target_grpc_proxy_resource=target_grpc_proxy_resource, + target_grpc_proxy_resource=compute.TargetGrpcProxy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.TargetGrpcProxy.to_json( - target_grpc_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetGrpcProxies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -795,28 +876,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetGrpcProxyList( id="id_value", - items=[ - compute.TargetGrpcProxy(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetGrpcProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetGrpcProxyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -824,21 +901,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetGrpcProxy(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetGrpcProxiesRequest +): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -846,27 +944,36 @@ def test_list_rest_flattened(): return_value = compute.TargetGrpcProxyList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetGrpcProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetGrpcProxyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetGrpcProxies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
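[Editor's note — illustration only, not part of the diff] The reworked *_rest_flattened tests in these hunks stop grepping the serialized body and query params for each flattened field and instead assert that the transcoded URI matches the method's HTTP-rule path template via google.api_core.path_template.validate. A minimal standalone sketch of that check follows; the URI and template are hypothetical, mirroring the pattern the tests use ({project} and {target_grpc_proxy} each match a single path segment):

    from google.api_core import path_template

    # A transcoded URI of the kind the mocked Session.request receives.
    uri = (
        "https://compute.googleapis.com/compute/v1/projects/sample1"
        "/global/targetGrpcProxies/sample2"
    )

    # Template in the same style as the assertions above.
    template = (
        "https://compute.googleapis.com/compute/v1/projects/{project}"
        "/global/targetGrpcProxies/{target_grpc_proxy}"
    )

    assert path_template.validate(template, uri)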
@@ -876,11 +983,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetGrpcProxyList( @@ -910,16 +1019,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetGrpcProxy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -931,9 +1039,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} + request_init["target_grpc_proxy_resource"] = compute.TargetGrpcProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -943,7 +1054,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -961,14 +1071,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -979,7 +1088,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -997,18 +1105,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchTargetGrpcProxyRequest +): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_grpc_proxy": "sample2"} + request_init["target_grpc_proxy_resource"] = compute.TargetGrpcProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1016,40 +1149,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_grpc_proxy_resource = compute.TargetGrpcProxy( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_grpc_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_grpc_proxy="target_grpc_proxy_value", - target_grpc_proxy_resource=target_grpc_proxy_resource, + target_grpc_proxy_resource=compute.TargetGrpcProxy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_grpc_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetGrpcProxy.to_json( - target_grpc_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = TargetGrpcProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetGrpcProxies/{target_grpc_proxy}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
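[Editor's note — illustration only, not part of the diff] Every new *_rest_bad_request test added in these hunks mocks Session.request to return a 400 response (with a bare requests.Request attached) and expects the client call to raise core_exceptions.BadRequest. The status-to-exception mapping lives in google.api_core.exceptions; whether the generated REST transport uses the from_http_response helper shown below is an assumption, but the sketch captures the mapping the tests depend on:

    from google.api_core import exceptions as core_exceptions
    from requests import Request, Response

    # Fake a 400 response the same way the bad-request tests do.
    response = Response()
    response.status_code = 400
    response.request = Request()
    response._content = b"{}"

    # from_http_response builds the google.api_core exception matching the
    # HTTP status; a 400 maps to BadRequest.
    error = core_exceptions.from_http_response(response)
    assert isinstance(error, core_exceptions.BadRequest)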
@@ -1146,8 +1281,10 @@ def test_target_grpc_proxies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_grpc_proxies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1171,29 +1308,6 @@ def test_target_grpc_proxies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_grpc_proxies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_grpc_proxies.transports.TargetGrpcProxiesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetGrpcProxiesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_grpc_proxies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1205,7 +1319,6 @@ def test_target_grpc_proxies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_grpc_proxies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1221,21 +1334,6 @@ def test_target_grpc_proxies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_grpc_proxies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetGrpcProxiesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_grpc_proxies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1382,3 +1480,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetGrpcProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_http_proxies.py b/tests/unit/gapic/compute_v1/test_target_http_proxies.py index 932e4a888..334c1818c 100644 --- a/tests/unit/gapic/compute_v1/test_target_http_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_http_proxies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_http_proxies import TargetHttpProxiesClient from google.cloud.compute_v1.services.target_http_proxies import pagers from google.cloud.compute_v1.services.target_http_proxies import transports -from google.cloud.compute_v1.services.target_http_proxies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_target_http_proxies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_target_http_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check 
the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_target_http_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_target_http_proxies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_target_http_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_target_http_proxies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_target_http_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_target_http_proxies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_target_http_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
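[Editor's note — illustration only, not part of the diff] The client_options tests in these hunks now build the client with an explicit transport=transport_name and expect the transport constructor to be called with always_use_jwt_access=True. A standalone sketch of that assertion pattern; the concrete transport class name (transports.TargetHttpProxiesRestTransport) follows the library's naming convention and is assumed here rather than taken from the excerpt:

    import mock

    from google.cloud.compute_v1.services.target_http_proxies import (
        TargetHttpProxiesClient,
        transports,
    )

    # Patch the REST transport constructor and inspect the keyword arguments
    # the client passes through, as the tests above check via
    # patched.assert_called_once_with(..., always_use_jwt_access=True).
    with mock.patch.object(
        transports.TargetHttpProxiesRestTransport, "__init__", return_value=None
    ) as patched:
        TargetHttpProxiesClient(transport="rest")
        _, kwargs = patched.call_args
        assert kwargs["always_use_jwt_access"] is True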
@@ -369,7 +359,7 @@ def test_target_http_proxies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_target_http_proxies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_target_http_proxies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_target_http_proxies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_target_http_proxies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_target_http_proxies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,24 +427,15 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetHttpProxyAggregatedList( id="id_value", - items={ - "key_value": compute.TargetHttpProxiesScopedList( - target_http_proxies=[ - compute.TargetHttpProxy( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", @@ -459,9 +443,9 @@ def test_aggregated_list_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxyAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxyAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -469,25 +453,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.TargetHttpProxiesScopedList( - target_http_proxies=[ - compute.TargetHttpProxy(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListTargetHttpProxiesRequest +): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) + + def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -495,27 +497,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.TargetHttpProxyAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxyAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxyAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/targetHttpProxies" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -525,11 +536,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetHttpProxyAggregatedList( @@ -565,10 +578,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.TargetHttpProxiesScopedList) assert pager.get("h") is None @@ -586,7 +598,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.TargetHttpProxiesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -598,9 +610,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -610,7 +622,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -628,14 +639,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -646,7 +656,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -664,18 +673,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetHttpProxyRequest +): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -683,30 +714,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_http_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_http_proxy="target_http_proxy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_http_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -725,9 +764,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -746,9 +785,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -767,12 +806,37 @@ def test_get_rest( assert response.url_map == "url_map_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetHttpProxyRequest +): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -780,30 +844,38 @@ def test_get_rest_flattened(): return_value = compute.TargetHttpProxy() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_http_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_http_proxy="target_http_proxy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_http_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -822,9 +894,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_http_proxy_resource"] = compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -834,7 +909,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -852,14 +926,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -870,7 +943,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -888,18 +960,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetHttpProxyRequest +): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_http_proxy_resource"] = compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -907,38 +1004,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_http_proxy_resource = compute.TargetHttpProxy( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - target_http_proxy_resource=target_http_proxy_resource, + target_http_proxy_resource=compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpProxy.to_json( - target_http_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpProxies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -959,28 +1059,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetHttpProxyList( id="id_value", - items=[ - compute.TargetHttpProxy(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -988,21 +1084,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetHttpProxy(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetHttpProxiesRequest +): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1010,27 +1127,36 @@ def test_list_rest_flattened(): return_value = compute.TargetHttpProxyList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpProxyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpProxies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1040,11 +1166,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetHttpProxyList( @@ -1074,16 +1202,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetHttpProxy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1095,9 +1222,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request_init["target_http_proxy_resource"] = compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1107,7 +1237,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1125,14 +1254,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1143,7 +1271,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1161,18 +1288,43 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchTargetHttpProxyRequest +): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request_init["target_http_proxy_resource"] = compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1180,40 +1332,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_http_proxy_resource = compute.TargetHttpProxy( - creation_timestamp="creation_timestamp_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_http_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_http_proxy="target_http_proxy_value", - target_http_proxy_resource=target_http_proxy_resource, + target_http_proxy_resource=compute.TargetHttpProxy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_http_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpProxy.to_json( - target_http_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpProxies/{target_http_proxy}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1235,9 +1389,12 @@ def test_set_url_map_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1247,7 +1404,6 @@ def test_set_url_map_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1265,14 +1421,13 @@ def test_set_url_map_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_url_map(request) @@ -1283,7 +1438,6 @@ def test_set_url_map_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1301,18 +1455,43 @@ def test_set_url_map_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_url_map_rest_bad_request( + transport: str = "rest", request_type=compute.SetUrlMapTargetHttpProxyRequest +): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_http_proxy": "sample2"} + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + def test_set_url_map_rest_from_dict(): test_set_url_map_rest(request_type=dict) -def test_set_url_map_rest_flattened(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_url_map_rest_flattened(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1320,38 +1499,40 @@ def test_set_url_map_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_map_reference_resource = compute.UrlMapReference(url_map="url_map_value") - client.set_url_map( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_http_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_http_proxy="target_http_proxy_value", - url_map_reference_resource=url_map_reference_resource, + url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"), ) + mock_args.update(sample_request) + client.set_url_map(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_http_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMapReference.to_json( - url_map_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_url_map_rest_flattened_error(): - client = TargetHttpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/targetHttpProxies/{target_http_proxy}/setUrlMap" + % client.transport._host, + args[1], + ) + + +def test_set_url_map_rest_flattened_error(transport: str = "rest"): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1448,8 +1629,10 @@ def test_target_http_proxies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_http_proxies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1473,29 +1656,6 @@ def test_target_http_proxies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_http_proxies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_http_proxies.transports.TargetHttpProxiesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetHttpProxiesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_http_proxies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1507,7 +1667,6 @@ def test_target_http_proxies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_http_proxies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1523,21 +1682,6 @@ def test_target_http_proxies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_http_proxies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetHttpProxiesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_http_proxies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1684,3 +1828,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetHttpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_https_proxies.py b/tests/unit/gapic/compute_v1/test_target_https_proxies.py index 4cd46442c..01fa33dd2 100644 --- a/tests/unit/gapic/compute_v1/test_target_https_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_https_proxies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,6 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_https_proxies import ( @@ -38,28 +39,11 @@ ) from google.cloud.compute_v1.services.target_https_proxies import pagers from google.cloud.compute_v1.services.target_https_proxies import transports -from google.cloud.compute_v1.services.target_https_proxies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -198,7 +182,7 @@ def test_target_https_proxies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -207,6 +191,7 @@ def test_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -214,7 +199,7 @@ def test_target_https_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -223,6 +208,7 @@ def test_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, 
) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -230,7 +216,7 @@ def test_target_https_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -239,6 +225,7 @@ def test_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -258,7 +245,7 @@ def test_target_https_proxies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -267,6 +254,7 @@ def test_target_https_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -309,7 +297,7 @@ def test_target_https_proxies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -326,6 +314,7 @@ def test_target_https_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -350,7 +339,7 @@ def test_target_https_proxies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +348,7 @@ def test_target_https_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -371,7 +361,7 @@ def test_target_https_proxies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -380,6 +370,7 @@ def test_target_https_proxies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -394,7 +385,7 @@ def test_target_https_proxies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -403,6 +394,7 @@ def test_target_https_proxies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -417,7 +409,7 @@ def test_target_https_proxies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -426,6 +418,7 @@ def test_target_https_proxies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -437,35 +430,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetHttpsProxyAggregatedList( id="id_value", - items={ - "key_value": compute.TargetHttpsProxiesScopedList( - target_https_proxies=[ - compute.TargetHttpsProxy( - authorization_policy="authorization_policy_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxyAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxyAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -473,29 +456,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.TargetHttpsProxiesScopedList( - target_https_proxies=[ - compute.TargetHttpsProxy( - authorization_policy="authorization_policy_value" - ) - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", + request_type=compute.AggregatedListTargetHttpsProxiesRequest, +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): +def test_aggregated_list_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -504,28 +501,35 @@ def test_aggregated_list_rest_flattened(): return_value = compute.TargetHttpsProxyAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxyAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxyAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/targetHttpsProxies" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -536,13 +540,15 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = TargetHttpsProxiesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetHttpsProxyAggregatedList( @@ -578,10 +584,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.TargetHttpsProxiesScopedList) assert pager.get("h") is None @@ -599,7 +604,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.TargetHttpsProxiesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -611,9 +616,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -623,7 +628,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -641,14 +645,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -659,7 +662,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -677,19 +679,39 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetHttpsProxyRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): +def test_delete_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -698,31 +720,37 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_https_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_https_proxy="target_https_proxy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): +def test_delete_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -742,9 +770,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -768,9 +796,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -794,13 +822,36 @@ def test_get_rest( assert response.url_map == "url_map_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetHttpsProxyRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): +def test_get_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -809,31 +860,37 @@ def test_get_rest_flattened(): return_value = compute.TargetHttpsProxy() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_https_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_https_proxy="target_https_proxy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): +def test_get_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -853,9 +910,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_https_proxy_resource"] = compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -865,7 +925,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -883,14 +942,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -901,7 +959,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -919,19 +976,42 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetHttpsProxyRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_https_proxy_resource"] = compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): +def test_insert_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -940,39 +1020,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_https_proxy_resource = compute.TargetHttpsProxy( - authorization_policy="authorization_policy_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - target_https_proxy_resource=target_https_proxy_resource, + target_https_proxy_resource=compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpsProxy.to_json( - target_https_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpsProxies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -994,30 +1075,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetHttpsProxyList( id="id_value", - items=[ - compute.TargetHttpsProxy( - authorization_policy="authorization_policy_value" - ) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1025,22 +1100,41 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetHttpsProxy(authorization_policy="authorization_policy_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetHttpsProxiesRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): +def test_list_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1049,28 +1143,35 @@ def test_list_rest_flattened(): return_value = compute.TargetHttpsProxyList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetHttpsProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetHttpsProxyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpsProxies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): +def test_list_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1081,13 +1182,15 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetHttpsProxiesClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetHttpsProxyList( @@ -1117,16 +1220,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetHttpsProxy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1138,9 +1240,12 @@ def test_patch_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init["target_https_proxy_resource"] = compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1150,7 +1255,6 @@ def test_patch_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1168,14 +1272,13 @@ def test_patch_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1186,7 +1289,6 @@ def test_patch_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1204,19 +1306,42 @@ def test_patch_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchTargetHttpsProxyRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + 
request_init["target_https_proxy_resource"] = compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): +def test_patch_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1225,41 +1350,41 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_https_proxy_resource = compute.TargetHttpsProxy( - authorization_policy="authorization_policy_value" - ) - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_https_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_https_proxy="target_https_proxy_value", - target_https_proxy_resource=target_https_proxy_resource, + target_https_proxy_resource=compute.TargetHttpsProxy( + authorization_policy="authorization_policy_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpsProxy.to_json( - target_https_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1282,9 +1407,14 @@ def test_set_quic_override_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init[ + "target_https_proxies_set_quic_override_request_resource" + ] = compute.TargetHttpsProxiesSetQuicOverrideRequest( + quic_override=compute.TargetHttpsProxiesSetQuicOverrideRequest.QuicOverride.DISABLE + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1294,7 +1424,6 @@ def test_set_quic_override_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1312,14 +1441,13 @@ def test_set_quic_override_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_quic_override(request) @@ -1330,7 +1458,6 @@ def test_set_quic_override_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1348,19 +1475,44 @@ def test_set_quic_override_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_quic_override_rest_bad_request( + transport: str = "rest", request_type=compute.SetQuicOverrideTargetHttpsProxyRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init[ + "target_https_proxies_set_quic_override_request_resource" + ] = compute.TargetHttpsProxiesSetQuicOverrideRequest( + quic_override=compute.TargetHttpsProxiesSetQuicOverrideRequest.QuicOverride.DISABLE + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_quic_override(request) + + def test_set_quic_override_rest_from_dict(): test_set_quic_override_rest(request_type=dict) -def test_set_quic_override_rest_flattened(): +def test_set_quic_override_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1369,41 +1521,41 @@ def test_set_quic_override_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_https_proxies_set_quic_override_request_resource = compute.TargetHttpsProxiesSetQuicOverrideRequest( - quic_override=compute.TargetHttpsProxiesSetQuicOverrideRequest.QuicOverride.DISABLE - ) - client.set_quic_override( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_https_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_https_proxy="target_https_proxy_value", - target_https_proxies_set_quic_override_request_resource=target_https_proxies_set_quic_override_request_resource, + target_https_proxies_set_quic_override_request_resource=compute.TargetHttpsProxiesSetQuicOverrideRequest( + quic_override=compute.TargetHttpsProxiesSetQuicOverrideRequest.QuicOverride.DISABLE + ), ) + mock_args.update(sample_request) + client.set_quic_override(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpsProxiesSetQuicOverrideRequest.to_json( - target_https_proxies_set_quic_override_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_quic_override_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setQuicOverride" + % client.transport._host, + args[1], + ) + + +def test_set_quic_override_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1427,9 +1579,14 @@ def test_set_ssl_certificates_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ] = compute.TargetHttpsProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1439,7 +1596,6 @@ def test_set_ssl_certificates_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1457,14 +1613,13 @@ def test_set_ssl_certificates_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_ssl_certificates(request) @@ -1475,7 +1630,6 @@ def test_set_ssl_certificates_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1493,19 +1647,45 @@ def test_set_ssl_certificates_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_ssl_certificates_rest_bad_request( + transport: str = "rest", + request_type=compute.SetSslCertificatesTargetHttpsProxyRequest, +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init[ + "target_https_proxies_set_ssl_certificates_request_resource" + ] = compute.TargetHttpsProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates(request) + + def test_set_ssl_certificates_rest_from_dict(): test_set_ssl_certificates_rest(request_type=dict) -def test_set_ssl_certificates_rest_flattened(): +def test_set_ssl_certificates_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1514,41 +1694,41 @@ def test_set_ssl_certificates_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_https_proxies_set_ssl_certificates_request_resource = compute.TargetHttpsProxiesSetSslCertificatesRequest( - ssl_certificates=["ssl_certificates_value"] - ) - client.set_ssl_certificates( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_https_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_https_proxy="target_https_proxy_value", - target_https_proxies_set_ssl_certificates_request_resource=target_https_proxies_set_ssl_certificates_request_resource, + target_https_proxies_set_ssl_certificates_request_resource=compute.TargetHttpsProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ), ) + mock_args.update(sample_request) + client.set_ssl_certificates(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetHttpsProxiesSetSslCertificatesRequest.to_json( - target_https_proxies_set_ssl_certificates_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_ssl_certificates_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setSslCertificates" + % client.transport._host, + args[1], + ) + + +def test_set_ssl_certificates_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1571,9 +1751,12 @@ def test_set_ssl_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init["ssl_policy_reference_resource"] = compute.SslPolicyReference( + ssl_policy="ssl_policy_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1583,7 +1766,6 @@ def test_set_ssl_policy_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1601,14 +1783,13 @@ def test_set_ssl_policy_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_ssl_policy(request) @@ -1619,7 +1800,6 @@ def test_set_ssl_policy_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1637,19 +1817,42 @@ def test_set_ssl_policy_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_ssl_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetSslPolicyTargetHttpsProxyRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init["ssl_policy_reference_resource"] = compute.SslPolicyReference( + ssl_policy="ssl_policy_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_policy(request) + + def test_set_ssl_policy_rest_from_dict(): test_set_ssl_policy_rest(request_type=dict) -def test_set_ssl_policy_rest_flattened(): +def test_set_ssl_policy_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. 
@@ -1658,41 +1861,41 @@ def test_set_ssl_policy_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - ssl_policy_reference_resource = compute.SslPolicyReference( - ssl_policy="ssl_policy_value" - ) - client.set_ssl_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_https_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_https_proxy="target_https_proxy_value", - ssl_policy_reference_resource=ssl_policy_reference_resource, + ssl_policy_reference_resource=compute.SslPolicyReference( + ssl_policy="ssl_policy_value" + ), ) + mock_args.update(sample_request) + client.set_ssl_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.SslPolicyReference.to_json( - ssl_policy_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_ssl_policy_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetHttpsProxies/{target_https_proxy}/setSslPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_ssl_policy_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1715,9 +1918,12 @@ def test_set_url_map_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1727,7 +1933,6 @@ def test_set_url_map_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1745,14 +1950,13 @@ def test_set_url_map_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_url_map(request) @@ -1763,7 +1967,6 @@ def test_set_url_map_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1781,19 +1984,42 @@ def test_set_url_map_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_url_map_rest_bad_request( + transport: str = "rest", request_type=compute.SetUrlMapTargetHttpsProxyRequest +): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_https_proxy": "sample2"} + request_init["url_map_reference_resource"] = compute.UrlMapReference( + url_map="url_map_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_url_map(request) + + def test_set_url_map_rest_from_dict(): test_set_url_map_rest(request_type=dict) -def test_set_url_map_rest_flattened(): +def test_set_url_map_rest_flattened(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Mock the http request call within the method and fake a response. @@ -1802,39 +2028,39 @@ def test_set_url_map_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- url_map_reference_resource = compute.UrlMapReference(url_map="url_map_value") - client.set_url_map( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_https_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_https_proxy="target_https_proxy_value", - url_map_reference_resource=url_map_reference_resource, + url_map_reference_resource=compute.UrlMapReference(url_map="url_map_value"), ) + mock_args.update(sample_request) + client.set_url_map(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_https_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMapReference.to_json( - url_map_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_url_map_rest_flattened_error(): + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/targetHttpsProxies/{target_https_proxy}/setUrlMap" + % client.transport._host, + args[1], + ) + + +def test_set_url_map_rest_flattened_error(transport: str = "rest"): client = TargetHttpsProxiesClient( - credentials=ga_credentials.AnonymousCredentials(), + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # Attempting to call a method with both a request object and flattened @@ -1935,8 +2161,10 @@ def test_target_https_proxies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_https_proxies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1960,29 +2188,6 @@ def test_target_https_proxies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_https_proxies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_https_proxies.transports.TargetHttpsProxiesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetHttpsProxiesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_https_proxies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1994,7 +2199,6 @@ def test_target_https_proxies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_https_proxies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2010,21 +2214,6 @@ def test_target_https_proxies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_https_proxies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetHttpsProxiesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_https_proxies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2171,3 +2360,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetHttpsProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_instances.py b/tests/unit/gapic/compute_v1/test_target_instances.py index 4a0498ee5..2f63c1eca 100644 --- a/tests/unit/gapic/compute_v1/test_target_instances.py +++ b/tests/unit/gapic/compute_v1/test_target_instances.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_instances import TargetInstancesClient from google.cloud.compute_v1.services.target_instances import pagers from google.cloud.compute_v1.services.target_instances import transports -from google.cloud.compute_v1.services.target_instances.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_target_instances_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_target_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_target_instances_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_target_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided 
and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_target_instances_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_target_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_target_instances_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_target_instances_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -306,7 +294,7 @@ def test_target_instances_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -323,6 +311,7 @@ def test_target_instances_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -347,7 +336,7 @@ def test_target_instances_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -356,6 +345,7 @@ def test_target_instances_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -368,7 +358,7 @@ def test_target_instances_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +367,7 @@ def test_target_instances_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +382,7 @@ def test_target_instances_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -400,6 +391,7 @@ def test_target_instances_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -414,7 +406,7 @@ def test_target_instances_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -423,6 +415,7 @@ def test_target_instances_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -433,35 +426,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetInstanceAggregatedList( id="id_value", - items={ - "key_value": compute.TargetInstancesScopedList( - target_instances=[ - compute.TargetInstance( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetInstanceAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetInstanceAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -469,26 +452,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.TargetInstancesScopedList( - target_instances=[ - compute.TargetInstance(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListTargetInstancesRequest +): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -496,27 +496,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.TargetInstanceAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetInstanceAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetInstanceAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/targetInstances" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -526,11 +535,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetInstanceAggregatedList( @@ -566,10 +577,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.TargetInstancesScopedList) assert pager.get("h") is None @@ -587,7 +597,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.TargetInstancesScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -599,9 +609,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -611,7 +625,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -629,14 +642,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -647,7 +659,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -665,18 +676,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetInstanceRequest +): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -684,33 +721,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", target_instance="target_instance_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "target_instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -730,9 +778,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -751,9 +803,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetInstance.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetInstance.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -772,12 +824,41 @@ def test_get_rest( assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetInstanceRequest +): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -785,33 +866,44 @@ def test_get_rest_flattened(): return_value = compute.TargetInstance() # Wrap the value into a proper Response obj - json_return_value = compute.TargetInstance.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetInstance.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "target_instance": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", target_instance="target_instance_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "target_instance_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/targetInstances/{target_instance}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -831,9 +923,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["target_instance_resource"] = compute.TargetInstance( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -843,7 +938,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -861,14 +955,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -879,7 +972,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -897,18 +989,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetInstanceRequest +): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["target_instance_resource"] = compute.TargetInstance( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -916,40 +1033,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_instance_resource = compute.TargetInstance( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", - target_instance_resource=target_instance_resource, + target_instance_resource=compute.TargetInstance( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert compute.TargetInstance.to_json( - target_instance_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/targetInstances" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -971,28 +1090,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetInstanceList( id="id_value", - items=[ - compute.TargetInstance(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetInstanceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetInstanceList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1000,21 +1115,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetInstance(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetInstancesRequest +): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1022,30 +1158,36 @@ def test_list_rest_flattened(): return_value = compute.TargetInstanceList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetInstanceList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetInstanceList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/targetInstances" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1057,11 +1199,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetInstancesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetInstanceList( @@ -1091,16 +1235,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetInstance) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1185,8 +1328,10 @@ def test_target_instances_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_instances_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1210,29 +1355,6 @@ def test_target_instances_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_instances_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_instances.transports.TargetInstancesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetInstancesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_instances_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1244,7 +1366,6 @@ def test_target_instances_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_instances_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1260,21 +1381,6 @@ def test_target_instances_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_instances_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetInstancesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_instances_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1421,3 +1527,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetInstancesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_pools.py b/tests/unit/gapic/compute_v1/test_target_pools.py index 8ece4a9b1..e90f96af2 100644 --- a/tests/unit/gapic/compute_v1/test_target_pools.py +++ b/tests/unit/gapic/compute_v1/test_target_pools.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_pools import TargetPoolsClient from google.cloud.compute_v1.services.target_pools import pagers from google.cloud.compute_v1.services.target_pools import transports -from google.cloud.compute_v1.services.target_pools.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_target_pools_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_target_pools_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_target_pools_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_target_pools_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_target_pools_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_target_pools_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_target_pools_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_target_pools_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_target_pools_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def 
test_target_pools_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_target_pools_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_target_pools_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_target_pools_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_target_pools_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_target_pools_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_target_pools_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_target_pools_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_target_pools_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,9 +408,14 @@ def test_add_health_check_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_add_health_check_request_resource" + ] = compute.TargetPoolsAddHealthCheckRequest( + health_checks=[compute.HealthCheckReference(health_check="health_check_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -427,7 +425,6 @@ def test_add_health_check_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -445,14 +442,13 @@ def test_add_health_check_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_health_check(request) @@ -463,7 +459,6 @@ def test_add_health_check_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -481,18 +476,45 @@ def test_add_health_check_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_health_check_rest_bad_request( + transport: str = "rest", request_type=compute.AddHealthCheckTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_add_health_check_request_resource" + ] = compute.TargetPoolsAddHealthCheckRequest( + health_checks=[compute.HealthCheckReference(health_check="health_check_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_health_check(request) + + def test_add_health_check_rest_from_dict(): test_add_health_check_rest(request_type=dict) -def test_add_health_check_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_health_check_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -500,44 +522,49 @@ def test_add_health_check_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_pools_add_health_check_request_resource = compute.TargetPoolsAddHealthCheckRequest( - health_checks=[ - compute.HealthCheckReference(health_check="health_check_value") - ] - ) - client.add_health_check( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", - target_pools_add_health_check_request_resource=target_pools_add_health_check_request_resource, + target_pools_add_health_check_request_resource=compute.TargetPoolsAddHealthCheckRequest( + health_checks=[ + compute.HealthCheckReference(health_check="health_check_value") + ] + ), ) + mock_args.update(sample_request) + client.add_health_check(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) - assert compute.TargetPoolsAddHealthCheckRequest.to_json( - target_pools_add_health_check_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_health_check_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addHealthCheck" + % client.transport._host, + args[1], + ) + + +def test_add_health_check_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -562,9 +589,14 @@ def test_add_instance_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_add_instance_request_resource" + ] = compute.TargetPoolsAddInstanceRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -574,7 +606,6 @@ def test_add_instance_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -592,14 +623,13 @@ def test_add_instance_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.add_instance(request) @@ -610,7 +640,6 @@ def test_add_instance_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -628,18 +657,45 @@ def test_add_instance_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_add_instance_rest_bad_request( + transport: str = "rest", request_type=compute.AddInstanceTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_add_instance_request_resource" + ] = compute.TargetPoolsAddInstanceRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.add_instance(request) + + def test_add_instance_rest_from_dict(): test_add_instance_rest(request_type=dict) -def test_add_instance_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_add_instance_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -647,42 +703,47 @@ def test_add_instance_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_pools_add_instance_request_resource = compute.TargetPoolsAddInstanceRequest( - instances=[compute.InstanceReference(instance="instance_value")] - ) - client.add_instance( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", - target_pools_add_instance_request_resource=target_pools_add_instance_request_resource, + target_pools_add_instance_request_resource=compute.TargetPoolsAddInstanceRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ), ) + mock_args.update(sample_request) + client.add_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) - assert compute.TargetPoolsAddInstanceRequest.to_json( - target_pools_add_instance_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_add_instance_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/addInstance" + % client.transport._host, + args[1], + ) + + +def test_add_instance_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -705,31 +766,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.TargetPoolAggregatedList( id="id_value", - items={ - "key_value": compute.TargetPoolsScopedList( - target_pools=[compute.TargetPool(backup_pool="backup_pool_value")] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetPoolAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPoolAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -737,24 +792,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.TargetPoolsScopedList( - target_pools=[compute.TargetPool(backup_pool="backup_pool_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListTargetPoolsRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -762,27 +836,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.TargetPoolAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetPoolAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPoolAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/targetPools" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -792,11 +875,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetPoolAggregatedList( @@ -829,10 +914,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.TargetPoolsScopedList) assert pager.get("h") is None @@ -850,7 +934,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.TargetPoolsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -862,9 +946,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -874,7 +958,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -892,14 +975,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -910,7 +992,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -928,18 +1009,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -947,33 +1050,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -991,9 +1105,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetTargetPoolReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1014,9 +1128,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetTargetPoolReq ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetPool.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPool.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -1037,12 +1151,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetTargetPoolReq assert response.session_affinity == compute.TargetPool.SessionAffinity.CLIENT_IP +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1050,33 +1189,44 @@ def test_get_rest_flattened(): return_value = compute.TargetPool() # Wrap the value into a proper Response obj - json_return_value = compute.TargetPool.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPool.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1096,42 +1246,65 @@ def test_get_health_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init["instance_reference_resource"] = compute.InstanceReference( + instance="instance_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.TargetPoolInstanceHealth( - health_status=[ - compute.HealthStatus(annotations={"key_value": "value_value"}) - ], - kind="kind_value", - ) + return_value = compute.TargetPoolInstanceHealth(kind="kind_value",) # Wrap the value into a proper Response obj - json_return_value = compute.TargetPoolInstanceHealth.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPoolInstanceHealth.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_health(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.TargetPoolInstanceHealth) - assert response.health_status == [ - compute.HealthStatus(annotations={"key_value": "value_value"}) - ] assert response.kind == "kind_value" +def test_get_health_rest_bad_request( + transport: str = "rest", request_type=compute.GetHealthTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init["instance_reference_resource"] = compute.InstanceReference( + instance="instance_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_health(request) + + def test_get_health_rest_from_dict(): test_get_health_rest(request_type=dict) -def test_get_health_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_health_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1139,42 +1312,47 @@ def test_get_health_rest_flattened(): return_value = compute.TargetPoolInstanceHealth() # Wrap the value into a proper Response obj - json_return_value = compute.TargetPoolInstanceHealth.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPoolInstanceHealth.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- instance_reference_resource = compute.InstanceReference( - instance="instance_value" - ) - client.get_health( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", - instance_reference_resource=instance_reference_resource, + instance_reference_resource=compute.InstanceReference( + instance="instance_value" + ), ) + mock_args.update(sample_request) + client.get_health(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) - assert compute.InstanceReference.to_json( - instance_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_get_health_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/getHealth" + % client.transport._host, + args[1], + ) + + +def test_get_health_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1197,9 +1375,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_pool_resource"] = compute.TargetPool( + backup_pool="backup_pool_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1209,7 +1390,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1227,14 +1407,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -1245,7 +1424,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1263,18 +1441,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_pool_resource"] = compute.TargetPool( + backup_pool="backup_pool_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1282,38 +1485,40 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_pool_resource = compute.TargetPool(backup_pool="backup_pool_value") - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - target_pool_resource=target_pool_resource, + target_pool_resource=compute.TargetPool(backup_pool="backup_pool_value"), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.TargetPool.to_json( - target_pool_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1333,26 +1538,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetPoolList( id="id_value", - items=[compute.TargetPool(backup_pool="backup_pool_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetPoolList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPoolList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1360,19 +1563,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [compute.TargetPool(backup_pool="backup_pool_value")] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetPoolsRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1380,30 +1606,36 @@ def test_list_rest_flattened(): return_value = compute.TargetPoolList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetPoolList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetPoolList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
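Note on the new assertion style: the flattened-call tests above no longer grep the request body for substrings; they validate the outgoing URL against the method's HTTP rule via google.api_core.path_template. A minimal standalone sketch of that check follows; the host compute.googleapis.com and the sample path values are illustrative, not taken from the client under test.

    from google.api_core import path_template

    # {project} and {region} each match a single path segment, so validate()
    # confirms a concrete URL satisfies the method's URI template.
    template = (
        "https://compute.googleapis.com/compute/v1"
        "/projects/{project}/regions/{region}/targetPools"
    )
    url = (
        "https://compute.googleapis.com/compute/v1"
        "/projects/sample1/regions/sample2/targetPools"
    )
    assert path_template.validate(template, url)
    assert not path_template.validate(template, url + "/extra")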
@@ -1415,11 +1647,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetPoolList( @@ -1449,16 +1683,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetPool) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1470,9 +1703,14 @@ def test_remove_health_check_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_remove_health_check_request_resource" + ] = compute.TargetPoolsRemoveHealthCheckRequest( + health_checks=[compute.HealthCheckReference(health_check="health_check_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1482,7 +1720,6 @@ def test_remove_health_check_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1500,14 +1737,13 @@ def test_remove_health_check_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_health_check(request) @@ -1518,7 +1754,6 @@ def test_remove_health_check_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1536,18 +1771,45 @@ def test_remove_health_check_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_health_check_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveHealthCheckTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_remove_health_check_request_resource" + ] = compute.TargetPoolsRemoveHealthCheckRequest( + health_checks=[compute.HealthCheckReference(health_check="health_check_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_health_check(request) + + def test_remove_health_check_rest_from_dict(): test_remove_health_check_rest(request_type=dict) -def test_remove_health_check_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_health_check_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1555,44 +1817,49 @@ def test_remove_health_check_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_pools_remove_health_check_request_resource = compute.TargetPoolsRemoveHealthCheckRequest( - health_checks=[ - compute.HealthCheckReference(health_check="health_check_value") - ] - ) - client.remove_health_check( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", - target_pools_remove_health_check_request_resource=target_pools_remove_health_check_request_resource, + target_pools_remove_health_check_request_resource=compute.TargetPoolsRemoveHealthCheckRequest( + health_checks=[ + compute.HealthCheckReference(health_check="health_check_value") + ] + ), ) + mock_args.update(sample_request) + client.remove_health_check(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) - assert compute.TargetPoolsRemoveHealthCheckRequest.to_json( - target_pools_remove_health_check_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_remove_health_check_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeHealthCheck" + % client.transport._host, + args[1], + ) + + +def test_remove_health_check_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1617,9 +1884,14 @@ def test_remove_instance_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_remove_instance_request_resource" + ] = compute.TargetPoolsRemoveInstanceRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1629,7 +1901,6 @@ def test_remove_instance_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1647,14 +1918,13 @@ def test_remove_instance_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.remove_instance(request) @@ -1665,7 +1935,6 @@ def test_remove_instance_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1683,18 +1952,45 @@ def test_remove_instance_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_remove_instance_rest_bad_request( + transport: str = "rest", request_type=compute.RemoveInstanceTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init[ + "target_pools_remove_instance_request_resource" + ] = compute.TargetPoolsRemoveInstanceRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.remove_instance(request) + + def test_remove_instance_rest_from_dict(): test_remove_instance_rest(request_type=dict) -def test_remove_instance_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_remove_instance_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1702,42 +1998,47 @@ def test_remove_instance_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_pools_remove_instance_request_resource = compute.TargetPoolsRemoveInstanceRequest( - instances=[compute.InstanceReference(instance="instance_value")] - ) - client.remove_instance( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", - target_pools_remove_instance_request_resource=target_pools_remove_instance_request_resource, + target_pools_remove_instance_request_resource=compute.TargetPoolsRemoveInstanceRequest( + instances=[compute.InstanceReference(instance="instance_value")] + ), ) + mock_args.update(sample_request) + client.remove_instance(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) - assert compute.TargetPoolsRemoveInstanceRequest.to_json( - target_pools_remove_instance_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_remove_instance_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/removeInstance" + % client.transport._host, + args[1], + ) + + +def test_remove_instance_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1760,9 +2061,12 @@ def test_set_backup_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init["target_reference_resource"] = compute.TargetReference( + target="target_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1772,7 +2076,6 @@ def test_set_backup_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1790,14 +2093,13 @@ def test_set_backup_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_backup(request) @@ -1808,7 +2110,6 @@ def test_set_backup_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1826,18 +2127,43 @@ def test_set_backup_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_backup_rest_bad_request( + transport: str = "rest", request_type=compute.SetBackupTargetPoolRequest +): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "target_pool": "sample3"} + request_init["target_reference_resource"] = compute.TargetReference( + target="target_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_backup(request) + + def test_set_backup_rest_from_dict(): test_set_backup_rest(request_type=dict) -def test_set_backup_rest_flattened(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_backup_rest_flattened(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1845,40 +2171,45 @@ def test_set_backup_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_reference_resource = compute.TargetReference(target="target_value") - client.set_backup( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_pool": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_pool="target_pool_value", - target_reference_resource=target_reference_resource, + target_reference_resource=compute.TargetReference(target="target_value"), ) + mock_args.update(sample_request) + client.set_backup(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_pool_value" in http_call[1] + str(body) + str(params) - assert compute.TargetReference.to_json( - target_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_backup_rest_flattened_error(): - client = TargetPoolsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetPools/{target_pool}/setBackup" + % client.transport._host, + args[1], + ) + + +def test_set_backup_rest_flattened_error(transport: str = "rest"): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
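Note on the new *_rest_bad_request tests: every method gains one, and they all share the same shape — build a request that satisfies transcoding, patch requests' Session.request to return a 400, and expect the client to surface core_exceptions.BadRequest. A condensed, self-contained sketch of that shape, reusing the same TargetPoolsClient fixtures the diff itself uses:

    import mock
    import pytest
    from requests import Request, Response
    from requests.sessions import Session

    from google.api_core import exceptions as core_exceptions
    from google.auth import credentials as ga_credentials
    from google.cloud.compute_v1.services.target_pools import TargetPoolsClient
    from google.cloud.compute_v1.types import compute


    def test_get_rest_bad_request_sketch():
        client = TargetPoolsClient(
            credentials=ga_credentials.AnonymousCredentials(), transport="rest",
        )
        # A request populated with the path fields the HTTP rule needs.
        request = compute.GetTargetPoolRequest(
            {"project": "sample1", "region": "sample2", "target_pool": "sample3"}
        )

        with mock.patch.object(Session, "request") as req, pytest.raises(
            core_exceptions.BadRequest
        ):
            # Fake a 400 from the underlying HTTP session; the GAPIC surface
            # should translate it into a BadRequest exception.
            response_value = Response()
            response_value.status_code = 400
            response_value.request = Request()
            req.return_value = response_value
            client.get(request)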
@@ -1978,8 +2309,10 @@ def test_target_pools_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_pools_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -2003,29 +2336,6 @@ def test_target_pools_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_pools_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_pools.transports.TargetPoolsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetPoolsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_pools_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -2037,7 +2347,6 @@ def test_target_pools_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_pools_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -2053,21 +2362,6 @@ def test_target_pools_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_pools_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetPoolsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_pools_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -2214,3 +2508,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetPoolsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py b/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py index 5360adbfb..2f5a3ef7a 100644 --- a/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_ssl_proxies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_ssl_proxies import TargetSslProxiesClient from google.cloud.compute_v1.services.target_ssl_proxies import pagers from google.cloud.compute_v1.services.target_ssl_proxies import transports -from google.cloud.compute_v1.services.target_ssl_proxies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_target_ssl_proxies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_target_ssl_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_target_ssl_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_target_ssl_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case 
api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_target_ssl_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_target_ssl_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_target_ssl_proxies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_target_ssl_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_target_ssl_proxies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_target_ssl_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_target_ssl_proxies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_target_ssl_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
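Note on the client_options changes: each test above now passes transport=transport_name explicitly and expects always_use_jwt_access=True among the keyword arguments the client forwards to the transport constructor. A small sketch of that assertion style; the transport class name TargetSslProxiesRestTransport is assumed from the generated transports module rather than shown in this diff.

    import mock

    from google.cloud.compute_v1.services.target_ssl_proxies import (
        TargetSslProxiesClient,
        transports,
    )


    def test_always_use_jwt_access_forwarded_sketch():
        transport_class = transports.TargetSslProxiesRestTransport
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            TargetSslProxiesClient(transport="rest")
            # The client should construct the transport exactly once and hand it
            # the new always_use_jwt_access flag along with the other options.
            assert patched.call_count == 1
            _, kwargs = patched.call_args
            assert kwargs["always_use_jwt_access"] is True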
@@ -369,7 +359,7 @@ def test_target_ssl_proxies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_target_ssl_proxies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_target_ssl_proxies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_target_ssl_proxies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_target_ssl_proxies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_target_ssl_proxies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -446,7 +439,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -464,14 +456,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -482,7 +473,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -500,18 +490,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetSslProxyRequest +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -519,30 +531,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_ssl_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_ssl_proxy="target_ssl_proxy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_ssl_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -561,9 +581,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -582,9 +602,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetSslProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetSslProxy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -603,12 +623,37 @@ def test_get_rest( assert response.ssl_policy == "ssl_policy_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetSslProxyRequest +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -616,30 +661,38 @@ def test_get_rest_flattened(): return_value = compute.TargetSslProxy() # Wrap the value into a proper Response obj - json_return_value = compute.TargetSslProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetSslProxy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_ssl_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_ssl_proxy="target_ssl_proxy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_ssl_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -658,9 +711,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_ssl_proxy_resource"] = compute.TargetSslProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -670,7 +726,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -688,14 +743,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -706,7 +760,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -724,18 +777,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetSslProxyRequest +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_ssl_proxy_resource"] = compute.TargetSslProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -743,38 +821,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_ssl_proxy_resource = compute.TargetSslProxy( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - target_ssl_proxy_resource=target_ssl_proxy_resource, + target_ssl_proxy_resource=compute.TargetSslProxy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.TargetSslProxy.to_json( - target_ssl_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -795,28 +876,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetSslProxyList( id="id_value", - items=[ - compute.TargetSslProxy(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetSslProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetSslProxyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -824,21 +901,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetSslProxy(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetSslProxiesRequest +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -846,27 +944,36 @@ def test_list_rest_flattened(): return_value = compute.TargetSslProxyList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetSslProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetSslProxyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -876,11 +983,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetSslProxyList( @@ -910,16 +1019,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetSslProxy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -931,9 +1039,12 @@ def test_set_backend_service_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ] = compute.TargetSslProxiesSetBackendServiceRequest(service="service_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -943,7 +1054,6 @@ def test_set_backend_service_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -961,14 +1071,13 @@ def test_set_backend_service_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_backend_service(request) @@ -979,7 +1088,6 @@ def test_set_backend_service_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -997,18 +1105,43 @@ def test_set_backend_service_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_backend_service_rest_bad_request( + transport: str = "rest", request_type=compute.SetBackendServiceTargetSslProxyRequest +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init[ + "target_ssl_proxies_set_backend_service_request_resource" + ] = compute.TargetSslProxiesSetBackendServiceRequest(service="service_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_backend_service(request) + + def test_set_backend_service_rest_from_dict(): test_set_backend_service_rest(request_type=dict) -def test_set_backend_service_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_backend_service_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1016,40 +1149,42 @@ def test_set_backend_service_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_ssl_proxies_set_backend_service_request_resource = compute.TargetSslProxiesSetBackendServiceRequest( - service="service_value" - ) - client.set_backend_service( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_ssl_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_ssl_proxy="target_ssl_proxy_value", - target_ssl_proxies_set_backend_service_request_resource=target_ssl_proxies_set_backend_service_request_resource, + target_ssl_proxies_set_backend_service_request_resource=compute.TargetSslProxiesSetBackendServiceRequest( + service="service_value" + ), ) + mock_args.update(sample_request) + client.set_backend_service(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_ssl_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetSslProxiesSetBackendServiceRequest.to_json( - target_ssl_proxies_set_backend_service_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_backend_service_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setBackendService" + % client.transport._host, + args[1], + ) + + +def test_set_backend_service_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1071,9 +1206,14 @@ def test_set_proxy_header_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ] = compute.TargetSslProxiesSetProxyHeaderRequest( + proxy_header=compute.TargetSslProxiesSetProxyHeaderRequest.ProxyHeader.NONE + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1083,7 +1223,6 @@ def test_set_proxy_header_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1101,14 +1240,13 @@ def test_set_proxy_header_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_proxy_header(request) @@ -1119,7 +1257,6 @@ def test_set_proxy_header_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1137,18 +1274,45 @@ def test_set_proxy_header_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_proxy_header_rest_bad_request( + transport: str = "rest", request_type=compute.SetProxyHeaderTargetSslProxyRequest +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init[ + "target_ssl_proxies_set_proxy_header_request_resource" + ] = compute.TargetSslProxiesSetProxyHeaderRequest( + proxy_header=compute.TargetSslProxiesSetProxyHeaderRequest.ProxyHeader.NONE + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_proxy_header(request) + + def test_set_proxy_header_rest_from_dict(): test_set_proxy_header_rest(request_type=dict) -def test_set_proxy_header_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_proxy_header_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1156,40 +1320,42 @@ def test_set_proxy_header_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_ssl_proxies_set_proxy_header_request_resource = compute.TargetSslProxiesSetProxyHeaderRequest( - proxy_header=compute.TargetSslProxiesSetProxyHeaderRequest.ProxyHeader.NONE - ) - client.set_proxy_header( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_ssl_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_ssl_proxy="target_ssl_proxy_value", - target_ssl_proxies_set_proxy_header_request_resource=target_ssl_proxies_set_proxy_header_request_resource, + target_ssl_proxies_set_proxy_header_request_resource=compute.TargetSslProxiesSetProxyHeaderRequest( + proxy_header=compute.TargetSslProxiesSetProxyHeaderRequest.ProxyHeader.NONE + ), ) + mock_args.update(sample_request) + client.set_proxy_header(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_ssl_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetSslProxiesSetProxyHeaderRequest.to_json( - target_ssl_proxies_set_proxy_header_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_proxy_header_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setProxyHeader" + % client.transport._host, + args[1], + ) + + +def test_set_proxy_header_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1212,9 +1378,14 @@ def test_set_ssl_certificates_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ] = compute.TargetSslProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1224,7 +1395,6 @@ def test_set_ssl_certificates_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1242,14 +1412,13 @@ def test_set_ssl_certificates_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_ssl_certificates(request) @@ -1260,7 +1429,6 @@ def test_set_ssl_certificates_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1278,18 +1446,46 @@ def test_set_ssl_certificates_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_ssl_certificates_rest_bad_request( + transport: str = "rest", + request_type=compute.SetSslCertificatesTargetSslProxyRequest, +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init[ + "target_ssl_proxies_set_ssl_certificates_request_resource" + ] = compute.TargetSslProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_certificates(request) + + def test_set_ssl_certificates_rest_from_dict(): test_set_ssl_certificates_rest(request_type=dict) -def test_set_ssl_certificates_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_ssl_certificates_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1297,40 +1493,42 @@ def test_set_ssl_certificates_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_ssl_proxies_set_ssl_certificates_request_resource = compute.TargetSslProxiesSetSslCertificatesRequest( - ssl_certificates=["ssl_certificates_value"] - ) - client.set_ssl_certificates( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_ssl_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_ssl_proxy="target_ssl_proxy_value", - target_ssl_proxies_set_ssl_certificates_request_resource=target_ssl_proxies_set_ssl_certificates_request_resource, + target_ssl_proxies_set_ssl_certificates_request_resource=compute.TargetSslProxiesSetSslCertificatesRequest( + ssl_certificates=["ssl_certificates_value"] + ), ) + mock_args.update(sample_request) + client.set_ssl_certificates(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_ssl_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetSslProxiesSetSslCertificatesRequest.to_json( - target_ssl_proxies_set_ssl_certificates_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_ssl_certificates_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslCertificates" + % client.transport._host, + args[1], + ) + + +def test_set_ssl_certificates_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1352,9 +1550,12 @@ def test_set_ssl_policy_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init["ssl_policy_reference_resource"] = compute.SslPolicyReference( + ssl_policy="ssl_policy_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1364,7 +1565,6 @@ def test_set_ssl_policy_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1382,14 +1582,13 @@ def test_set_ssl_policy_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_ssl_policy(request) @@ -1400,7 +1599,6 @@ def test_set_ssl_policy_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1418,18 +1616,43 @@ def test_set_ssl_policy_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_ssl_policy_rest_bad_request( + transport: str = "rest", request_type=compute.SetSslPolicyTargetSslProxyRequest +): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_ssl_proxy": "sample2"} + request_init["ssl_policy_reference_resource"] = compute.SslPolicyReference( + ssl_policy="ssl_policy_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_ssl_policy(request) + + def test_set_ssl_policy_rest_from_dict(): test_set_ssl_policy_rest(request_type=dict) -def test_set_ssl_policy_rest_flattened(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_ssl_policy_rest_flattened(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1437,40 +1660,42 @@ def test_set_ssl_policy_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - ssl_policy_reference_resource = compute.SslPolicyReference( - ssl_policy="ssl_policy_value" - ) - client.set_ssl_policy( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_ssl_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_ssl_proxy="target_ssl_proxy_value", - ssl_policy_reference_resource=ssl_policy_reference_resource, + ssl_policy_reference_resource=compute.SslPolicyReference( + ssl_policy="ssl_policy_value" + ), ) + mock_args.update(sample_request) + client.set_ssl_policy(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_ssl_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.SslPolicyReference.to_json( - ssl_policy_reference_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_ssl_policy_rest_flattened_error(): - client = TargetSslProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetSslProxies/{target_ssl_proxy}/setSslPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_ssl_policy_rest_flattened_error(transport: str = "rest"): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1568,8 +1793,10 @@ def test_target_ssl_proxies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_ssl_proxies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1593,29 +1820,6 @@ def test_target_ssl_proxies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_ssl_proxies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_ssl_proxies.transports.TargetSslProxiesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetSslProxiesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_ssl_proxies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1627,7 +1831,6 @@ def test_target_ssl_proxies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_ssl_proxies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1643,21 +1846,6 @@ def test_target_ssl_proxies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_ssl_proxies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetSslProxiesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_ssl_proxies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1804,3 +1992,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetSslProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py b/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py index 6213882aa..e0253fd6c 100644 --- a/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py +++ b/tests/unit/gapic/compute_v1/test_target_tcp_proxies.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_tcp_proxies import TargetTcpProxiesClient from google.cloud.compute_v1.services.target_tcp_proxies import pagers from google.cloud.compute_v1.services.target_tcp_proxies import transports -from google.cloud.compute_v1.services.target_tcp_proxies.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_target_tcp_proxies_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_target_tcp_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_target_tcp_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_target_tcp_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case 
api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_target_tcp_proxies_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_target_tcp_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_target_tcp_proxies_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_target_tcp_proxies_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_target_tcp_proxies_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_target_tcp_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_target_tcp_proxies_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_target_tcp_proxies_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -369,7 +359,7 @@ def test_target_tcp_proxies_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_target_tcp_proxies_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_target_tcp_proxies_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_target_tcp_proxies_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_target_tcp_proxies_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_target_tcp_proxies_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,9 +427,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -446,7 +439,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -464,14 +456,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -482,7 +473,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -500,18 +490,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetTcpProxyRequest +): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -519,30 +531,38 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_tcp_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_tcp_proxy="target_tcp_proxy_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_tcp_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -561,9 +581,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -581,9 +601,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetTcpProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetTcpProxy.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -601,12 +621,37 @@ def test_get_rest( assert response.service == "service_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetTcpProxyRequest +): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -614,30 +659,38 @@ def test_get_rest_flattened(): return_value = compute.TargetTcpProxy() # Wrap the value into a proper Response obj - json_return_value = compute.TargetTcpProxy.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetTcpProxy.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_tcp_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_tcp_proxy="target_tcp_proxy_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_tcp_proxy_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -656,9 +709,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_tcp_proxy_resource"] = compute.TargetTcpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -668,7 +724,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -686,14 +741,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -704,7 +758,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -722,18 +775,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetTcpProxyRequest +): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["target_tcp_proxy_resource"] = compute.TargetTcpProxy( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -741,38 +819,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- target_tcp_proxy_resource = compute.TargetTcpProxy( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", - target_tcp_proxy_resource=target_tcp_proxy_resource, + target_tcp_proxy_resource=compute.TargetTcpProxy( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.TargetTcpProxy.to_json( - target_tcp_proxy_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetTcpProxies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -793,28 +874,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetTcpProxyList( id="id_value", - items=[ - compute.TargetTcpProxy(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetTcpProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetTcpProxyList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -822,21 +899,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetTcpProxy(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetTcpProxiesRequest +): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -844,27 +942,36 @@ def test_list_rest_flattened(): return_value = compute.TargetTcpProxyList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetTcpProxyList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetTcpProxyList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetTcpProxies" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -874,11 +981,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetTcpProxyList( @@ -908,16 +1017,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetTcpProxy) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -929,9 +1037,12 @@ def test_set_backend_service_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ] = compute.TargetTcpProxiesSetBackendServiceRequest(service="service_value") + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -941,7 +1052,6 @@ def test_set_backend_service_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -959,14 +1069,13 @@ def test_set_backend_service_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_backend_service(request) @@ -977,7 +1086,6 @@ def test_set_backend_service_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -995,18 +1103,43 @@ def test_set_backend_service_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_backend_service_rest_bad_request( + transport: str = "rest", request_type=compute.SetBackendServiceTargetTcpProxyRequest +): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request_init[ + "target_tcp_proxies_set_backend_service_request_resource" + ] = compute.TargetTcpProxiesSetBackendServiceRequest(service="service_value") + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_backend_service(request) + + def test_set_backend_service_rest_from_dict(): test_set_backend_service_rest(request_type=dict) -def test_set_backend_service_rest_flattened(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_backend_service_rest_flattened(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1014,40 +1147,42 @@ def test_set_backend_service_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_tcp_proxies_set_backend_service_request_resource = compute.TargetTcpProxiesSetBackendServiceRequest( - service="service_value" - ) - client.set_backend_service( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_tcp_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_tcp_proxy="target_tcp_proxy_value", - target_tcp_proxies_set_backend_service_request_resource=target_tcp_proxies_set_backend_service_request_resource, + target_tcp_proxies_set_backend_service_request_resource=compute.TargetTcpProxiesSetBackendServiceRequest( + service="service_value" + ), ) + mock_args.update(sample_request) + client.set_backend_service(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_tcp_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetTcpProxiesSetBackendServiceRequest.to_json( - target_tcp_proxies_set_backend_service_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_backend_service_rest_flattened_error(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setBackendService" + % client.transport._host, + args[1], + ) + + +def test_set_backend_service_rest_flattened_error(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1069,9 +1204,14 @@ def test_set_proxy_header_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ] = compute.TargetTcpProxiesSetProxyHeaderRequest( + proxy_header=compute.TargetTcpProxiesSetProxyHeaderRequest.ProxyHeader.NONE + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1081,7 +1221,6 @@ def test_set_proxy_header_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1099,14 +1238,13 @@ def test_set_proxy_header_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_proxy_header(request) @@ -1117,7 +1255,6 @@ def test_set_proxy_header_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1135,18 +1272,45 @@ def test_set_proxy_header_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_proxy_header_rest_bad_request( + transport: str = "rest", request_type=compute.SetProxyHeaderTargetTcpProxyRequest +): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "target_tcp_proxy": "sample2"} + request_init[ + "target_tcp_proxies_set_proxy_header_request_resource" + ] = compute.TargetTcpProxiesSetProxyHeaderRequest( + proxy_header=compute.TargetTcpProxiesSetProxyHeaderRequest.ProxyHeader.NONE + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_proxy_header(request) + + def test_set_proxy_header_rest_from_dict(): test_set_proxy_header_rest(request_type=dict) -def test_set_proxy_header_rest_flattened(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_proxy_header_rest_flattened(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1154,40 +1318,42 @@ def test_set_proxy_header_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_tcp_proxies_set_proxy_header_request_resource = compute.TargetTcpProxiesSetProxyHeaderRequest( - proxy_header=compute.TargetTcpProxiesSetProxyHeaderRequest.ProxyHeader.NONE - ) - client.set_proxy_header( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "target_tcp_proxy": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", target_tcp_proxy="target_tcp_proxy_value", - target_tcp_proxies_set_proxy_header_request_resource=target_tcp_proxies_set_proxy_header_request_resource, + target_tcp_proxies_set_proxy_header_request_resource=compute.TargetTcpProxiesSetProxyHeaderRequest( + proxy_header=compute.TargetTcpProxiesSetProxyHeaderRequest.ProxyHeader.NONE + ), ) + mock_args.update(sample_request) + client.set_proxy_header(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "target_tcp_proxy_value" in http_call[1] + str(body) + str(params) - assert compute.TargetTcpProxiesSetProxyHeaderRequest.to_json( - target_tcp_proxies_set_proxy_header_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_proxy_header_rest_flattened_error(): - client = TargetTcpProxiesClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/targetTcpProxies/{target_tcp_proxy}/setProxyHeader" + % client.transport._host, + args[1], + ) + + +def test_set_proxy_header_rest_flattened_error(transport: str = "rest"): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1283,8 +1449,10 @@ def test_target_tcp_proxies_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_tcp_proxies_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1308,29 +1476,6 @@ def test_target_tcp_proxies_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_tcp_proxies_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_tcp_proxies.transports.TargetTcpProxiesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetTcpProxiesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_tcp_proxies_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1342,7 +1487,6 @@ def test_target_tcp_proxies_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_tcp_proxies_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1358,21 +1502,6 @@ def test_target_tcp_proxies_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_tcp_proxies_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetTcpProxiesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_tcp_proxies_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1519,3 +1648,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetTcpProxiesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py b/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py index 16bbd97aa..7b656b9a7 100644 --- a/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py +++ b/tests/unit/gapic/compute_v1/test_target_vpn_gateways.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.target_vpn_gateways import TargetVpnGatewaysClient from google.cloud.compute_v1.services.target_vpn_gateways import pagers from google.cloud.compute_v1.services.target_vpn_gateways import transports -from google.cloud.compute_v1.services.target_vpn_gateways.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -196,7 +180,7 @@ def test_target_vpn_gateways_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +189,7 @@ def test_target_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +197,7 @@ def test_target_vpn_gateways_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +206,7 @@ def test_target_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check 
the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -228,7 +214,7 @@ def test_target_vpn_gateways_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -237,6 +223,7 @@ def test_target_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -256,7 +243,7 @@ def test_target_vpn_gateways_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -265,6 +252,7 @@ def test_target_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -307,7 +295,7 @@ def test_target_vpn_gateways_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -324,6 +312,7 @@ def test_target_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -348,7 +337,7 @@ def test_target_vpn_gateways_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -357,6 +346,7 @@ def test_target_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -369,7 +359,7 @@ def test_target_vpn_gateways_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -378,6 +368,7 @@ def test_target_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -392,7 +383,7 @@ def test_target_vpn_gateways_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -401,6 +392,7 @@ def test_target_vpn_gateways_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,7 +407,7 @@ def test_target_vpn_gateways_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -424,6 +416,7 @@ def test_target_vpn_gateways_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -434,35 +427,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.TargetVpnGatewayAggregatedList( id="id_value", - items={ - "key_value": compute.TargetVpnGatewaysScopedList( - target_vpn_gateways=[ - compute.TargetVpnGateway( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetVpnGatewayAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetVpnGatewayAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -470,26 +453,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.TargetVpnGatewaysScopedList( - target_vpn_gateways=[ - compute.TargetVpnGateway(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListTargetVpnGatewaysRequest +): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -497,27 +497,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.TargetVpnGatewayAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetVpnGatewayAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetVpnGatewayAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/targetVpnGateways" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -527,11 +536,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetVpnGatewayAggregatedList( @@ -567,10 +578,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.TargetVpnGatewaysScopedList) assert pager.get("h") is None @@ -588,7 +598,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.TargetVpnGatewaysScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -600,9 +610,13 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_vpn_gateway": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -612,7 +626,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -630,14 +643,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -648,7 +660,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -666,18 +677,44 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteTargetVpnGatewayRequest +): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_vpn_gateway": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -685,33 +722,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_vpn_gateway": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_vpn_gateway="target_vpn_gateway_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_vpn_gateway_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -731,9 +779,13 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_vpn_gateway": "sample3", + } + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -753,9 +805,9 @@ def test_get_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetVpnGateway.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetVpnGateway.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -775,12 +827,41 @@ def test_get_rest( assert response.tunnels == ["tunnels_value"] +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetTargetVpnGatewayRequest +): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "target_vpn_gateway": "sample3", + } + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
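The new *_rest_bad_request tests (continued in the hunk lines below) feed a 400 requests.Response through the mocked Session and expect google.api_core to surface it as BadRequest. A one-line sanity check of that status-to-exception mapping, assuming google-api-core's public exception_class_for_http_status helper:

from google.api_core import exceptions as core_exceptions

# 400 maps to BadRequest, which is what the pytest.raises() context expects.
assert (
    core_exceptions.exception_class_for_http_status(400)
    is core_exceptions.BadRequest
)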
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -788,33 +869,44 @@ def test_get_rest_flattened(): return_value = compute.TargetVpnGateway() # Wrap the value into a proper Response obj - json_return_value = compute.TargetVpnGateway.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetVpnGateway.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "target_vpn_gateway": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", target_vpn_gateway="target_vpn_gateway_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "target_vpn_gateway_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways/{target_vpn_gateway}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -834,9 +926,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_vpn_gateway_resource"] = compute.TargetVpnGateway( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
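The request = request_type(request_init) pattern used throughout these tests works because proto-plus message constructors accept a mapping as their first positional argument, so the sample dicts become fully populated request objects. A quick illustration with the same placeholder values the tests use:

from google.cloud.compute_v1.types import compute

request = compute.DeleteTargetVpnGatewayRequest(
    {"project": "sample1", "region": "sample2", "target_vpn_gateway": "sample3"}
)
assert request.project == "sample1"
assert request.target_vpn_gateway == "sample3"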
with mock.patch.object(Session, "request") as req: @@ -846,7 +941,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -864,14 +958,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -882,7 +975,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -900,18 +992,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertTargetVpnGatewayRequest +): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["target_vpn_gateway_resource"] = compute.TargetVpnGateway( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -919,40 +1036,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - target_vpn_gateway_resource = compute.TargetVpnGateway( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - target_vpn_gateway_resource=target_vpn_gateway_resource, + target_vpn_gateway_resource=compute.TargetVpnGateway( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.TargetVpnGateway.to_json( - target_vpn_gateway_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -974,28 +1093,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.TargetVpnGatewayList( id="id_value", - items=[ - compute.TargetVpnGateway(creation_timestamp="creation_timestamp_value") - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.TargetVpnGatewayList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetVpnGatewayList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1003,21 +1118,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.TargetVpnGateway(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListTargetVpnGatewaysRequest +): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1025,30 +1161,36 @@ def test_list_rest_flattened(): return_value = compute.TargetVpnGatewayList() # Wrap the value into a proper Response obj - json_return_value = compute.TargetVpnGatewayList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TargetVpnGatewayList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/targetVpnGateways" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1060,11 +1202,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = TargetVpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.TargetVpnGatewayList( @@ -1094,16 +1238,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.TargetVpnGateway) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1190,8 +1333,10 @@ def test_target_vpn_gateways_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_target_vpn_gateways_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1215,29 +1360,6 @@ def test_target_vpn_gateways_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_target_vpn_gateways_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.target_vpn_gateways.transports.TargetVpnGatewaysTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TargetVpnGatewaysTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_target_vpn_gateways_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. 
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1249,7 +1371,6 @@ def test_target_vpn_gateways_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_target_vpn_gateways_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1265,21 +1386,6 @@ def test_target_vpn_gateways_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_target_vpn_gateways_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TargetVpnGatewaysClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_target_vpn_gateways_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1426,3 +1532,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = TargetVpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
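The new test_transport_close and test_client_ctx tests assert that the client works as a context manager and closes its transport on exit. A hedged usage sketch of that behavior (running it needs real ADC credentials; the project and region are placeholders):

from google.cloud import compute_v1

with compute_v1.TargetVpnGatewaysClient() as client:
    client.list(project="my-project", region="us-central1")
# Leaving the with-block calls transport.close(), which for the REST
# transport closes the underlying requests Session.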
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_url_maps.py b/tests/unit/gapic/compute_v1/test_url_maps.py index bc45efef0..8b9719b65 100644 --- a/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/tests/unit/gapic/compute_v1/test_url_maps.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.url_maps import UrlMapsClient from google.cloud.compute_v1.services.url_maps import pagers from google.cloud.compute_v1.services.url_maps import transports -from google.cloud.compute_v1.services.url_maps.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -184,7 +168,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -193,6 +177,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -200,7 +185,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -209,6 +194,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + 
always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -216,7 +202,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -225,6 +211,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -244,7 +231,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -253,6 +240,7 @@ def test_url_maps_client_client_options(client_class, transport_class, transport client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -283,7 +271,7 @@ def test_url_maps_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -300,6 +288,7 @@ def test_url_maps_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -324,7 +313,7 @@ def test_url_maps_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -333,6 +322,7 @@ def test_url_maps_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. 
@@ -345,7 +335,7 @@ def test_url_maps_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -354,6 +344,7 @@ def test_url_maps_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -368,7 +359,7 @@ def test_url_maps_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -377,6 +368,7 @@ def test_url_maps_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -391,7 +383,7 @@ def test_url_maps_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -400,6 +392,7 @@ def test_url_maps_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -410,33 +403,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.UrlMapsAggregatedList( id="id_value", - items={ - "key_value": compute.UrlMapsScopedList( - url_maps=[ - compute.UrlMap(creation_timestamp="creation_timestamp_value") - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapsAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapsAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -444,24 +429,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.UrlMapsScopedList( - url_maps=[compute.UrlMap(creation_timestamp="creation_timestamp_value")] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListUrlMapsRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -469,27 +473,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.UrlMapsAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapsAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapsAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/urlMaps" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -499,11 +512,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.UrlMapsAggregatedList( @@ -536,10 +551,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.UrlMapsScopedList) assert pager.get("h") is None @@ -554,7 +568,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.UrlMapsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -564,9 +578,9 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteUrlMapR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
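The reworked pager test above drives client.aggregated_list with a sample request instead of asserting private pager state. A hedged sketch of the equivalent real-world usage, assuming the aggregated pager yields (scope, UrlMapsScopedList) pairs as its get() accessor implies (placeholder project; needs credentials to run):

from google.cloud import compute_v1

client = compute_v1.UrlMapsClient()
for scope, scoped_list in client.aggregated_list(project="my-project"):
    for url_map in scoped_list.url_maps:
        print(scope, url_map.name)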
with mock.patch.object(Session, "request") as req: @@ -576,7 +590,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteUrlMapR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -594,14 +607,13 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteUrlMapR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -612,7 +624,6 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteUrlMapR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -630,18 +641,40 @@ def test_delete_rest(transport: str = "rest", request_type=compute.DeleteUrlMapR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -649,30 +682,36 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( - project="project_value", url_map="url_map_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "url_map": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", url_map="url_map_value",) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -689,49 +728,29 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetUrlMapRequest credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.UrlMap( creation_timestamp="creation_timestamp_value", - default_route_action=compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ), default_service="default_service_value", - default_url_redirect=compute.HttpRedirectAction( - host_redirect="host_redirect_value" - ), description="description_value", fingerprint="fingerprint_value", - header_action=compute.HttpHeaderAction( - request_headers_to_add=[ - compute.HttpHeaderOption(header_name="header_name_value") - ] - ), - host_rules=[compute.HostRule(description="description_value")], id=205, kind="kind_value", name="name_value", - path_matchers=[ - compute.PathMatcher( - default_route_action=compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ) - ) - ], region="region_value", self_link="self_link_value", - tests=[compute.UrlMapTest(description="description_value")], ) # Wrap the value into a proper Response obj - json_return_value = compute.UrlMap.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMap.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -739,42 +758,47 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetUrlMapRequest # Establish that the response is the type that we expect. assert isinstance(response, compute.UrlMap) assert response.creation_timestamp == "creation_timestamp_value" - assert response.default_route_action == compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ) assert response.default_service == "default_service_value" - assert response.default_url_redirect == compute.HttpRedirectAction( - host_redirect="host_redirect_value" - ) assert response.description == "description_value" assert response.fingerprint == "fingerprint_value" - assert response.header_action == compute.HttpHeaderAction( - request_headers_to_add=[ - compute.HttpHeaderOption(header_name="header_name_value") - ] - ) - assert response.host_rules == [compute.HostRule(description="description_value")] assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" - assert response.path_matchers == [ - compute.PathMatcher( - default_route_action=compute.HttpRouteAction( - cors_policy=compute.CorsPolicy(allow_credentials=True) - ) - ) - ] assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.tests == [compute.UrlMapTest(description="description_value")] + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -782,30 +806,36 @@ def test_get_rest_flattened(): return_value = compute.UrlMap() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMap.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMap.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( - project="project_value", url_map="url_map_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "url_map": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", url_map="url_map_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -822,9 +852,12 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertUrlMapR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -834,7 +867,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertUrlMapR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -852,14 +884,13 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertUrlMapR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -870,7 +901,6 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertUrlMapR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -888,18 +918,43 @@ def test_insert_rest(transport: str = "rest", request_type=compute.InsertUrlMapR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -907,35 +962,41 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_map_resource = compute.UrlMap(creation_timestamp="creation_timestamp_value") - client.insert( - project="project_value", url_map_resource=url_map_resource, + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMap.to_json( - url_map_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -956,9 +1017,12 @@ def test_invalidate_cache_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["cache_invalidation_rule_resource"] = compute.CacheInvalidationRule( + host="host_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -968,7 +1032,6 @@ def test_invalidate_cache_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -986,14 +1049,13 @@ def test_invalidate_cache_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.invalidate_cache(request) @@ -1004,7 +1066,6 @@ def test_invalidate_cache_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1022,18 +1083,43 @@ def test_invalidate_cache_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_invalidate_cache_rest_bad_request( + transport: str = "rest", request_type=compute.InvalidateCacheUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["cache_invalidation_rule_resource"] = compute.CacheInvalidationRule( + host="host_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.invalidate_cache(request) + + def test_invalidate_cache_rest_from_dict(): test_invalidate_cache_rest(request_type=dict) -def test_invalidate_cache_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_invalidate_cache_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1041,40 +1127,42 @@ def test_invalidate_cache_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - cache_invalidation_rule_resource = compute.CacheInvalidationRule( - host="host_value" - ) - client.invalidate_cache( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "url_map": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", url_map="url_map_value", - cache_invalidation_rule_resource=cache_invalidation_rule_resource, + cache_invalidation_rule_resource=compute.CacheInvalidationRule( + host="host_value" + ), ) + mock_args.update(sample_request) + client.invalidate_cache(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) - assert compute.CacheInvalidationRule.to_json( - cache_invalidation_rule_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_invalidate_cache_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps/{url_map}/invalidateCache" + % client.transport._host, + args[1], + ) + + +def test_invalidate_cache_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1094,26 +1182,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListUrlMapsRequ credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.UrlMapList( id="id_value", - items=[compute.UrlMap(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1121,21 +1207,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListUrlMapsRequ # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.UrlMap(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListUrlMapsRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1143,27 +1250,36 @@ def test_list_rest_flattened(): return_value = compute.UrlMapList() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1173,11 +1289,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.UrlMapList( @@ -1199,16 +1317,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.UrlMap) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1218,9 +1335,12 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchUrlMapReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
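test_list_rest_pager above queues one mocked HTTP response per page and lets the pager follow next_page_token. A condensed sketch of the same pattern, with placeholder page contents and a placeholder project value:

from unittest import mock

from requests import Response
from requests.sessions import Session

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.url_maps import UrlMapsClient
from google.cloud.compute_v1.types import compute

client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials())

# Two pages; the pager stops when next_page_token is empty.
pages = [
    compute.UrlMapList(items=[compute.UrlMap(), compute.UrlMap()], next_page_token="abc"),
    compute.UrlMapList(items=[compute.UrlMap()], next_page_token=""),
]

with mock.patch.object(Session, "request") as req:
    responses = []
    for page in pages:
        resp = Response()
        resp.status_code = 200
        resp._content = compute.UrlMapList.to_json(page).encode("UTF-8")
        responses.append(resp)
    # One HTTP call per page; side_effect hands out the next page each time.
    req.side_effect = responses
    results = list(client.list(request={"project": "sample1"}))

assert len(results) == 3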
with mock.patch.object(Session, "request") as req: @@ -1230,7 +1350,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchUrlMapReq creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1248,14 +1367,13 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchUrlMapReq target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.patch(request) @@ -1266,7 +1384,6 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchUrlMapReq assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1284,18 +1401,43 @@ def test_patch_rest(transport: str = "rest", request_type=compute.PatchUrlMapReq assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_patch_rest_bad_request( + transport: str = "rest", request_type=compute.PatchUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.patch(request) + + def test_patch_rest_from_dict(): test_patch_rest(request_type=dict) -def test_patch_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_patch_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1303,38 +1445,42 @@ def test_patch_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_map_resource = compute.UrlMap(creation_timestamp="creation_timestamp_value") - client.patch( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "url_map": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", url_map="url_map_value", - url_map_resource=url_map_resource, + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.patch(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMap.to_json( - url_map_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_patch_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1354,9 +1500,12 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateUrlMapR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1366,7 +1515,6 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateUrlMapR creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1384,14 +1532,13 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateUrlMapR target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.update(request) @@ -1402,7 +1549,6 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateUrlMapR assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1420,18 +1566,43 @@ def test_update_rest(transport: str = "rest", request_type=compute.UpdateUrlMapR assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_update_rest_bad_request( + transport: str = "rest", request_type=compute.UpdateUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["url_map_resource"] = compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.update(request) + + def test_update_rest_from_dict(): test_update_rest(request_type=dict) -def test_update_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_update_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1439,38 +1610,42 @@ def test_update_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_map_resource = compute.UrlMap(creation_timestamp="creation_timestamp_value") - client.update( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "url_map": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", url_map="url_map_value", - url_map_resource=url_map_resource, + url_map_resource=compute.UrlMap( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.update(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMap.to_json( - url_map_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_update_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps/{url_map}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1492,38 +1667,64 @@ def test_validate_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["url_maps_validate_request_resource"] = compute.UrlMapsValidateRequest( + resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.UrlMapsValidateResponse( - result=compute.UrlMapValidationResult(load_errors=["load_errors_value"]), - ) + return_value = compute.UrlMapsValidateResponse() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.validate(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.UrlMapsValidateResponse) - assert response.result == compute.UrlMapValidationResult( - load_errors=["load_errors_value"] + + +def test_validate_rest_bad_request( + transport: str = "rest", request_type=compute.ValidateUrlMapRequest +): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "url_map": "sample2"} + request_init["url_maps_validate_request_resource"] = compute.UrlMapsValidateRequest( + resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.validate(request) + def test_validate_rest_from_dict(): test_validate_rest(request_type=dict) -def test_validate_rest_flattened(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_validate_rest_flattened(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1531,40 +1732,42 @@ def test_validate_rest_flattened(): return_value = compute.UrlMapsValidateResponse() # Wrap the value into a proper Response obj - json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.UrlMapsValidateResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - url_maps_validate_request_resource = compute.UrlMapsValidateRequest( - resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") - ) - client.validate( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "url_map": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", url_map="url_map_value", - url_maps_validate_request_resource=url_maps_validate_request_resource, + url_maps_validate_request_resource=compute.UrlMapsValidateRequest( + resource=compute.UrlMap(creation_timestamp="creation_timestamp_value") + ), ) + mock_args.update(sample_request) + client.validate(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "url_map_value" in http_call[1] + str(body) + str(params) - assert compute.UrlMapsValidateRequest.to_json( - url_maps_validate_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_validate_rest_flattened_error(): - client = UrlMapsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/global/urlMaps/{url_map}/validate" + % client.transport._host, + args[1], + ) + + +def test_validate_rest_flattened_error(transport: str = "rest"): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1663,8 +1866,10 @@ def test_url_maps_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_url_maps_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1688,29 +1893,6 @@ def test_url_maps_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_url_maps_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.url_maps.transports.UrlMapsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.UrlMapsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_url_maps_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1722,7 +1904,6 @@ def test_url_maps_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_url_maps_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1738,21 +1919,6 @@ def test_url_maps_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_url_maps_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
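The new test_transport_close and test_client_ctx tests pin down the context-manager behaviour: leaving the with block closes the REST transport's session. A minimal sketch, assuming the rest transport exposes its requests session as _session, as the test's transport-to-attribute mapping indicates:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.url_maps import UrlMapsClient

client = UrlMapsClient(
    credentials=ga_credentials.AnonymousCredentials(), transport="rest"
)
# Patch Session.close at class level, as the test does, and verify it fires on exit.
with mock.patch.object(type(client.transport._session), "close") as close:
    with client:
        close.assert_not_called()
    close.assert_called_once()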
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - UrlMapsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_url_maps_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1899,3 +2065,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = UrlMapsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_vpn_gateways.py b/tests/unit/gapic/compute_v1/test_vpn_gateways.py index 3e6f8de5a..805c94597 100644 --- a/tests/unit/gapic/compute_v1/test_vpn_gateways.py +++ b/tests/unit/gapic/compute_v1/test_vpn_gateways.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.vpn_gateways import VpnGatewaysClient from google.cloud.compute_v1.services.vpn_gateways import pagers from google.cloud.compute_v1.services.vpn_gateways import transports -from google.cloud.compute_v1.services.vpn_gateways.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_vpn_gateways_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_vpn_gateways_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_vpn_gateways_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_vpn_gateways_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_vpn_gateways_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_vpn_gateways_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def 
test_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_vpn_gateways_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_vpn_gateways_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_vpn_gateways_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_vpn_gateways_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_vpn_gateways_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_vpn_gateways_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_vpn_gateways_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,35 +408,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.VpnGatewayAggregatedList( id="id_value", - items={ - "key_value": compute.VpnGatewaysScopedList( - vpn_gateways=[ - compute.VpnGateway( - creation_timestamp="creation_timestamp_value" - ) - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.VpnGatewayAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGatewayAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -451,26 +434,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.VpnGatewaysScopedList( - vpn_gateways=[ - compute.VpnGateway(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListVpnGatewaysRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -478,27 +478,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.VpnGatewayAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.VpnGatewayAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGatewayAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/vpnGateways" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -508,11 +517,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.VpnGatewayAggregatedList( @@ -545,10 +556,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.VpnGatewaysScopedList) assert pager.get("h") is None @@ -566,7 +576,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.VpnGatewaysScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -578,9 +588,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_gateway": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
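test_aggregated_list_rest_pager above also exercises the aggregated pager's dict-style access: get(key) returns the scoped list for a key present on the current page, or None otherwise. A minimal sketch with a single mocked page and hypothetical zone keys:

from unittest import mock

from requests import Response
from requests.sessions import Session

from google.auth import credentials as ga_credentials
from google.cloud.compute_v1.services.vpn_gateways import VpnGatewaysClient
from google.cloud.compute_v1.types import compute

client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials())

page = compute.VpnGatewayAggregatedList(
    items={
        "zones/us-central1-a": compute.VpnGatewaysScopedList(
            vpn_gateways=[compute.VpnGateway()]
        )
    },
    next_page_token="",
)

with mock.patch.object(Session, "request") as req:
    resp = Response()
    resp.status_code = 200
    resp._content = compute.VpnGatewayAggregatedList.to_json(page).encode("UTF-8")
    req.return_value = resp
    pager = client.aggregated_list(request={"project": "sample1"})
    # Present key returns its scoped list; absent key returns None.
    assert isinstance(pager.get("zones/us-central1-a"), compute.VpnGatewaysScopedList)
    assert pager.get("zones/us-east1-b") is None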
with mock.patch.object(Session, "request") as req: @@ -590,7 +600,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -608,14 +617,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -626,7 +634,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -644,18 +651,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteVpnGatewayRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_gateway": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -663,33 +692,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "vpn_gateway": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", vpn_gateway="vpn_gateway_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "vpn_gateway_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -707,9 +747,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetVpnGatewayReq credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_gateway": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -720,18 +760,16 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetVpnGatewayReq id=205, kind="kind_value", label_fingerprint="label_fingerprint_value", - labels={"key_value": "value_value"}, name="name_value", network="network_value", region="region_value", self_link="self_link_value", - vpn_interfaces=[compute.VpnGatewayVpnGatewayInterface(id=205)], ) # Wrap the value into a proper Response obj - json_return_value = compute.VpnGateway.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGateway.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -743,20 +781,43 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetVpnGatewayReq assert response.id == 205 assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" - assert response.labels == {"key_value": "value_value"} assert response.name == "name_value" assert response.network == "network_value" assert response.region == "region_value" assert response.self_link == "self_link_value" - assert response.vpn_interfaces == [compute.VpnGatewayVpnGatewayInterface(id=205)] + + +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetVpnGatewayRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_gateway": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -764,33 +825,44 @@ def test_get_rest_flattened(): return_value = compute.VpnGateway() # Wrap the value into a proper Response obj - json_return_value = compute.VpnGateway.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGateway.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "vpn_gateway": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", vpn_gateway="vpn_gateway_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "vpn_gateway_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -810,48 +882,58 @@ def test_get_status_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_gateway": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. - return_value = compute.VpnGatewaysGetStatusResponse( - result=compute.VpnGatewayStatus( - vpn_connections=[ - compute.VpnGatewayStatusVpnConnection( - peer_external_gateway="peer_external_gateway_value" - ) - ] - ), - ) + return_value = compute.VpnGatewaysGetStatusResponse() # Wrap the value into a proper Response obj - json_return_value = compute.VpnGatewaysGetStatusResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGatewaysGetStatusResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get_status(request) # Establish that the response is the type that we expect. assert isinstance(response, compute.VpnGatewaysGetStatusResponse) - assert response.result == compute.VpnGatewayStatus( - vpn_connections=[ - compute.VpnGatewayStatusVpnConnection( - peer_external_gateway="peer_external_gateway_value" - ) - ] + + +def test_get_status_rest_bad_request( + transport: str = "rest", request_type=compute.GetStatusVpnGatewayRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_gateway": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get_status(request) + def test_get_status_rest_from_dict(): test_get_status_rest(request_type=dict) -def test_get_status_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_status_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -859,33 +941,44 @@ def test_get_status_rest_flattened(): return_value = compute.VpnGatewaysGetStatusResponse() # Wrap the value into a proper Response obj - json_return_value = compute.VpnGatewaysGetStatusResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGatewaysGetStatusResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_status( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "vpn_gateway": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", vpn_gateway="vpn_gateway_value", ) + mock_args.update(sample_request) + client.get_status(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "vpn_gateway_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{vpn_gateway}/getStatus" + % client.transport._host, + args[1], + ) -def test_get_status_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_status_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -905,9 +998,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["vpn_gateway_resource"] = compute.VpnGateway( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -917,7 +1013,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -935,14 +1030,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -953,7 +1047,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -971,18 +1064,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertVpnGatewayRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["vpn_gateway_resource"] = compute.VpnGateway( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -990,40 +1108,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- vpn_gateway_resource = compute.VpnGateway( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - vpn_gateway_resource=vpn_gateway_resource, + vpn_gateway_resource=compute.VpnGateway( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.VpnGateway.to_json( - vpn_gateway_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnGateways" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1045,26 +1165,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.VpnGatewayList( id="id_value", - items=[compute.VpnGateway(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.VpnGatewayList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGatewayList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -1072,21 +1190,42 @@ def test_list_rest( # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.VpnGateway(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListVpnGatewaysRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1094,30 +1233,36 @@ def test_list_rest_flattened(): return_value = compute.VpnGatewayList() # Wrap the value into a proper Response obj - json_return_value = compute.VpnGatewayList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnGatewayList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnGateways" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1129,11 +1274,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.VpnGatewayList( @@ -1163,16 +1310,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.VpnGateway) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1184,9 +1330,12 @@ def test_set_labels_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_labels_request_resource"] = compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1196,7 +1345,6 @@ def test_set_labels_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -1214,14 +1362,13 @@ def test_set_labels_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.set_labels(request) @@ -1232,7 +1379,6 @@ def test_set_labels_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -1250,18 +1396,43 @@ def test_set_labels_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_set_labels_rest_bad_request( + transport: str = "rest", request_type=compute.SetLabelsVpnGatewayRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_labels_request_resource"] = compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.set_labels(request) + + def test_set_labels_rest_from_dict(): test_set_labels_rest(request_type=dict) -def test_set_labels_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_set_labels_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1269,42 +1440,47 @@ def test_set_labels_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - region_set_labels_request_resource = compute.RegionSetLabelsRequest( - label_fingerprint="label_fingerprint_value" - ) - client.set_labels( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - region_set_labels_request_resource=region_set_labels_request_resource, + region_set_labels_request_resource=compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), ) + mock_args.update(sample_request) + client.set_labels(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.RegionSetLabelsRequest.to_json( - region_set_labels_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_set_labels_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1327,9 +1503,12 @@ def test_test_iam_permissions_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -1339,9 +1518,9 @@ def test_test_iam_permissions_rest( ) # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.test_iam_permissions(request) @@ -1351,12 +1530,40 @@ def test_test_iam_permissions_rest( assert response.permissions == ["permissions_value"] +def test_test_iam_permissions_rest_bad_request( + transport: str = "rest", request_type=compute.TestIamPermissionsVpnGatewayRequest +): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = compute.TestPermissionsRequest( + permissions=["permissions_value"] + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.test_iam_permissions(request) + + def test_test_iam_permissions_rest_from_dict(): test_test_iam_permissions_rest(request_type=dict) -def test_test_iam_permissions_rest_flattened(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_test_iam_permissions_rest_flattened(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1364,42 +1571,47 @@ def test_test_iam_permissions_rest_flattened(): return_value = compute.TestPermissionsResponse() # Wrap the value into a proper Response obj - json_return_value = compute.TestPermissionsResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.TestPermissionsResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - test_permissions_request_resource = compute.TestPermissionsRequest( - permissions=["permissions_value"] - ) - client.test_iam_permissions( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", resource="resource_value", - test_permissions_request_resource=test_permissions_request_resource, + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), ) + mock_args.update(sample_request) + client.test_iam_permissions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "resource_value" in http_call[1] + str(body) + str(params) - assert compute.TestPermissionsRequest.to_json( - test_permissions_request_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_test_iam_permissions_rest_flattened_error(): - client = VpnGatewaysClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnGateways/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -1498,8 +1710,10 @@ def test_vpn_gateways_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_vpn_gateways_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1523,29 +1737,6 @@ def test_vpn_gateways_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_vpn_gateways_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.vpn_gateways.transports.VpnGatewaysTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.VpnGatewaysTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_vpn_gateways_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1557,7 +1748,6 @@ def test_vpn_gateways_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_vpn_gateways_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1573,21 +1763,6 @@ def test_vpn_gateways_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_vpn_gateways_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - VpnGatewaysClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_vpn_gateways_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1734,3 +1909,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = VpnGatewaysClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_vpn_tunnels.py b/tests/unit/gapic/compute_v1/test_vpn_tunnels.py index 07634e266..af4f90a36 100644 --- a/tests/unit/gapic/compute_v1/test_vpn_tunnels.py +++ b/tests/unit/gapic/compute_v1/test_vpn_tunnels.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.vpn_tunnels import VpnTunnelsClient from google.cloud.compute_v1.services.vpn_tunnels import pagers from google.cloud.compute_v1.services.vpn_tunnels import transports -from google.cloud.compute_v1.services.vpn_tunnels.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -189,7 +173,7 @@ def test_vpn_tunnels_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -198,6 +182,7 @@ def test_vpn_tunnels_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -205,7 +190,7 @@ def test_vpn_tunnels_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -214,6 +199,7 @@ def test_vpn_tunnels_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -221,7 +207,7 @@ def test_vpn_tunnels_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -230,6 +216,7 @@ def test_vpn_tunnels_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -249,7 +236,7 @@ def test_vpn_tunnels_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -258,6 +245,7 @@ def test_vpn_tunnels_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -288,7 +276,7 @@ def test_vpn_tunnels_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -305,6 +293,7 @@ def test_vpn_tunnels_client_mtls_env_auto( 
client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -329,7 +318,7 @@ def test_vpn_tunnels_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,6 +327,7 @@ def test_vpn_tunnels_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -350,7 +340,7 @@ def test_vpn_tunnels_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -359,6 +349,7 @@ def test_vpn_tunnels_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -373,7 +364,7 @@ def test_vpn_tunnels_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -382,6 +373,7 @@ def test_vpn_tunnels_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -396,7 +388,7 @@ def test_vpn_tunnels_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -405,6 +397,7 @@ def test_vpn_tunnels_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -415,33 +408,25 @@ def test_aggregated_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.VpnTunnelAggregatedList( id="id_value", - items={ - "key_value": compute.VpnTunnelsScopedList( - vpn_tunnels=[ - compute.VpnTunnel(creation_timestamp="creation_timestamp_value") - ] - ) - }, kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", unreachables=["unreachables_value"], - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.VpnTunnelAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnTunnelAggregatedList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.aggregated_list(request) @@ -449,26 +434,43 @@ def test_aggregated_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.AggregatedListPager) assert response.id == "id_value" - assert response.items == { - "key_value": compute.VpnTunnelsScopedList( - vpn_tunnels=[ - compute.VpnTunnel(creation_timestamp="creation_timestamp_value") - ] - ) - } assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" assert response.unreachables == ["unreachables_value"] - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_aggregated_list_rest_bad_request( + transport: str = "rest", request_type=compute.AggregatedListVpnTunnelsRequest +): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.aggregated_list(request) def test_aggregated_list_rest_from_dict(): test_aggregated_list_rest(request_type=dict) -def test_aggregated_list_rest_flattened(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -476,27 +478,36 @@ def test_aggregated_list_rest_flattened(): return_value = compute.VpnTunnelAggregatedList() # Wrap the value into a proper Response obj - json_return_value = compute.VpnTunnelAggregatedList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnTunnelAggregatedList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.aggregated_list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.aggregated_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/aggregated/vpnTunnels" + % client.transport._host, + args[1], + ) -def test_aggregated_list_rest_flattened_error(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -506,11 +517,13 @@ def test_aggregated_list_rest_flattened_error(): ) -def test_aggregated_list_pager(): +def test_aggregated_list_rest_pager(): client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.VpnTunnelAggregatedList( @@ -543,10 +556,9 @@ def test_aggregated_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.aggregated_list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.aggregated_list(request=sample_request) assert isinstance(pager.get("a"), compute.VpnTunnelsScopedList) assert pager.get("h") is None @@ -561,7 +573,7 @@ def test_aggregated_list_pager(): assert pager.get("a") is None assert isinstance(pager.get("h"), compute.VpnTunnelsScopedList) - pages = list(client.aggregated_list(request={}).pages) + pages = list(client.aggregated_list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -573,9 +585,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_tunnel": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -585,7 +597,6 @@ def test_delete_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -603,14 +614,13 @@ def test_delete_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -621,7 +631,6 @@ def test_delete_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -639,18 +648,40 @@ def test_delete_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteVpnTunnelRequest +): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_tunnel": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -658,33 +689,44 @@ def test_delete_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "vpn_tunnel": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", vpn_tunnel="vpn_tunnel_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "vpn_tunnel_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -702,9 +744,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetVpnTunnelRequ credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_tunnel": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -735,9 +777,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetVpnTunnelRequ ) # Wrap the value into a proper Response obj - json_return_value = compute.VpnTunnel.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnTunnel.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -768,12 +810,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetVpnTunnelRequ assert response.vpn_gateway_interface == 2229 +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetVpnTunnelRequest +): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "vpn_tunnel": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -781,33 +848,44 @@ def test_get_rest_flattened(): return_value = compute.VpnTunnel() # Wrap the value into a proper Response obj - json_return_value = compute.VpnTunnel.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnTunnel.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "vpn_tunnel": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", vpn_tunnel="vpn_tunnel_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert "vpn_tunnel_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels/{vpn_tunnel}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -827,9 +905,12 @@ def test_insert_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["vpn_tunnel_resource"] = compute.VpnTunnel( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -839,7 +920,6 @@ def test_insert_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -857,14 +937,13 @@ def test_insert_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.insert(request) @@ -875,7 +954,6 @@ def test_insert_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -893,18 +971,43 @@ def test_insert_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_insert_rest_bad_request( + transport: str = "rest", request_type=compute.InsertVpnTunnelRequest +): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["vpn_tunnel_resource"] = compute.VpnTunnel( + creation_timestamp="creation_timestamp_value" + ) + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.insert(request) + + def test_insert_rest_from_dict(): test_insert_rest(request_type=dict) -def test_insert_rest_flattened(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_insert_rest_flattened(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -912,40 +1015,42 @@ def test_insert_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- vpn_tunnel_resource = compute.VpnTunnel( - creation_timestamp="creation_timestamp_value" - ) - client.insert( + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( project="project_value", region="region_value", - vpn_tunnel_resource=vpn_tunnel_resource, + vpn_tunnel_resource=compute.VpnTunnel( + creation_timestamp="creation_timestamp_value" + ), ) + mock_args.update(sample_request) + client.insert(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) - assert compute.VpnTunnel.to_json( - vpn_tunnel_resource, - including_default_value_fields=False, - use_integers_for_enums=False, - ) in http_call[1] + str(body) + str(params) - - -def test_insert_rest_flattened_error(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -965,26 +1070,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListVpnTunnelsR credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.VpnTunnelList( id="id_value", - items=[compute.VpnTunnel(creation_timestamp="creation_timestamp_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.VpnTunnelList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnTunnelList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -992,21 +1095,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListVpnTunnelsR # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.VpnTunnel(creation_timestamp="creation_timestamp_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListVpnTunnelsRequest +): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -1014,30 +1138,36 @@ def test_list_rest_flattened(): return_value = compute.VpnTunnelList() # Wrap the value into a proper Response obj - json_return_value = compute.VpnTunnelList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.VpnTunnelList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", region="region_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", region="region_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "region_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/regions/{region}/vpnTunnels" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1049,11 +1179,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = VpnTunnelsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.VpnTunnelList( @@ -1075,16 +1207,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "region": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.VpnTunnel) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -1169,8 +1300,10 @@ def test_vpn_tunnels_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_vpn_tunnels_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -1194,29 +1327,6 @@ def test_vpn_tunnels_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_vpn_tunnels_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.vpn_tunnels.transports.VpnTunnelsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.VpnTunnelsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_vpn_tunnels_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1228,7 +1338,6 @@ def test_vpn_tunnels_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_vpn_tunnels_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1244,21 +1353,6 @@ def test_vpn_tunnels_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_vpn_tunnels_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - VpnTunnelsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_vpn_tunnels_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1405,3 +1499,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = VpnTunnelsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_zone_operations.py b/tests/unit/gapic/compute_v1/test_zone_operations.py index 478c71c38..616fbc4be 100644 --- a/tests/unit/gapic/compute_v1/test_zone_operations.py +++ b/tests/unit/gapic/compute_v1/test_zone_operations.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,33 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.zone_operations import ZoneOperationsClient from google.cloud.compute_v1.services.zone_operations import pagers from google.cloud.compute_v1.services.zone_operations import transports -from google.cloud.compute_v1.services.zone_operations.transports.base import ( - _GOOGLE_AUTH_VERSION, -) from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -195,7 +179,7 @@ def test_zone_operations_client_client_options( options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -204,6 +188,7 @@ def test_zone_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -211,7 +196,7 @@ def test_zone_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -220,6 +205,7 @@ def test_zone_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -227,7 +213,7 @@ def test_zone_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -236,6 +222,7 @@ def test_zone_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -255,7 +242,7 @@ def test_zone_operations_client_client_options( options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -264,6 +251,7 @@ def test_zone_operations_client_client_options( client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -296,7 +284,7 @@ def test_zone_operations_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -313,6 +301,7 @@ def 
test_zone_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -337,7 +326,7 @@ def test_zone_operations_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -346,6 +335,7 @@ def test_zone_operations_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -358,7 +348,7 @@ def test_zone_operations_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -367,6 +357,7 @@ def test_zone_operations_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -381,7 +372,7 @@ def test_zone_operations_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -390,6 +381,7 @@ def test_zone_operations_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -404,7 +396,7 @@ def test_zone_operations_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -413,6 +405,7 @@ def test_zone_operations_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -423,9 +416,9 @@ def test_delete_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "operation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -433,9 +426,9 @@ def test_delete_rest( return_value = compute.DeleteZoneOperationResponse() # Wrap the value into a proper Response obj - json_return_value = compute.DeleteZoneOperationResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DeleteZoneOperationResponse.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.delete(request) @@ -444,12 +437,37 @@ def test_delete_rest( assert isinstance(response, compute.DeleteZoneOperationResponse) +def test_delete_rest_bad_request( + transport: str = "rest", request_type=compute.DeleteZoneOperationRequest +): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "operation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.delete(request) + + def test_delete_rest_from_dict(): test_delete_rest(request_type=dict) -def test_delete_rest_flattened(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -457,31 +475,42 @@ def test_delete_rest_flattened(): return_value = compute.DeleteZoneOperationResponse() # Wrap the value into a proper Response obj - json_return_value = compute.DeleteZoneOperationResponse.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.DeleteZoneOperationResponse.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "operation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", operation="operation_value", ) + mock_args.update(sample_request) + client.delete(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_delete_rest_flattened_error(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_delete_rest_flattened_error(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -501,9 +530,9 @@ def test_get_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "operation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -513,7 +542,6 @@ def test_get_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -531,14 +559,13 @@ def test_get_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -549,7 +576,6 @@ def test_get_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -567,18 +593,40 @@ def test_get_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetZoneOperationRequest +): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "operation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest 
error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -586,31 +634,42 @@ def test_get_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "operation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", operation="operation_value", ) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/operations/{operation}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -630,26 +689,24 @@ def test_list_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.OperationList( id="id_value", - items=[compute.Operation(client_operation_id="client_operation_id_value")], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -657,21 +714,42 @@ def test_list_rest( # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Operation(client_operation_id="client_operation_id_value") - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListZoneOperationsRequest +): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -679,30 +757,36 @@ def test_list_rest_flattened(): return_value = compute.OperationList() # Wrap the value into a proper Response obj - json_return_value = compute.OperationList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.OperationList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/operations" + % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -714,11 +798,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.OperationList( @@ -740,16 +826,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1", "zone": "sample2"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Operation) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -761,9 +846,9 @@ def test_wait_rest( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "operation": "sample3"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -773,7 +858,6 @@ def test_wait_rest( creation_timestamp="creation_timestamp_value", description="description_value", end_time="end_time_value", - error=compute.Error(errors=[compute.Errors(code="code_value")]), http_error_message="http_error_message_value", http_error_status_code=2374, id=205, @@ -791,14 +875,13 @@ def test_wait_rest( target_id=947, target_link="target_link_value", user="user_value", - warnings=[compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED)], zone="zone_value", ) # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.wait(request) @@ -809,7 +892,6 @@ def test_wait_rest( assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.end_time == "end_time_value" - assert response.error == compute.Error(errors=[compute.Errors(code="code_value")]) assert response.http_error_message == "http_error_message_value" assert response.http_error_status_code == 2374 assert response.id == 205 @@ -827,18 +909,40 @@ def test_wait_rest( assert response.target_id == 947 assert response.target_link == "target_link_value" assert response.user == "user_value" - assert response.warnings == [ - compute.Warnings(code=compute.Warnings.Code.CLEANUP_FAILED) - ] assert response.zone == "zone_value" +def test_wait_rest_bad_request( + transport: str = "rest", request_type=compute.WaitZoneOperationRequest +): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "operation": "sample3"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.wait(request) + + def test_wait_rest_from_dict(): test_wait_rest(request_type=dict) -def test_wait_rest_flattened(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_wait_rest_flattened(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -846,31 +950,42 @@ def test_wait_rest_flattened(): return_value = compute.Operation() # Wrap the value into a proper Response obj - json_return_value = compute.Operation.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Operation.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.wait( + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "operation": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( project="project_value", zone="zone_value", operation="operation_value", ) + mock_args.update(sample_request) + client.wait(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) - assert "operation_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}/operations/{operation}/wait" + % client.transport._host, + args[1], + ) -def test_wait_rest_flattened_error(): - client = ZoneOperationsClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_wait_rest_flattened_error(transport: str = "rest"): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -962,8 +1077,10 @@ def test_zone_operations_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_zone_operations_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -987,29 +1104,6 @@ def test_zone_operations_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_zone_operations_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.zone_operations.transports.ZoneOperationsTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ZoneOperationsTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_zone_operations_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -1021,7 +1115,6 @@ def test_zone_operations_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_zone_operations_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -1037,21 +1130,6 @@ def test_zone_operations_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_zone_operations_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ZoneOperationsClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_zone_operations_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -1198,3 +1276,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ZoneOperationsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() diff --git a/tests/unit/gapic/compute_v1/test_zones.py b/tests/unit/gapic/compute_v1/test_zones.py index db3ca996b..141a28eca 100644 --- a/tests/unit/gapic/compute_v1/test_zones.py +++ b/tests/unit/gapic/compute_v1/test_zones.py @@ -15,7 +15,6 @@ # import os import mock -import packaging.version import grpc from grpc.experimental import aio @@ -24,6 +23,7 @@ from proto.marshal.rules.dates import DurationRule, TimestampRule from requests import Response +from requests import Request from requests.sessions import Session from google.api_core import client_options @@ -31,31 +31,17 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import path_template from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError from google.cloud.compute_v1.services.zones import ZonesClient from google.cloud.compute_v1.services.zones import pagers from google.cloud.compute_v1.services.zones import transports -from google.cloud.compute_v1.services.zones.transports.base import _GOOGLE_AUTH_VERSION from google.cloud.compute_v1.types import compute from google.oauth2 import service_account import google.auth -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). 
-requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - - def client_cert_source_callback(): return b"cert bytes", b"key bytes" @@ -180,7 +166,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -189,6 +175,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -196,7 +183,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -205,6 +192,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is @@ -212,7 +200,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -221,6 +209,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has @@ -240,7 +229,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -249,6 +238,7 @@ def test_zones_client_client_options(client_class, transport_class, transport_na client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -279,7 +269,7 @@ def test_zones_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: 
patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) if use_client_cert_env == "false": expected_client_cert_source = None @@ -296,6 +286,7 @@ def test_zones_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case ADC client cert is provided. Whether client cert is used depends on @@ -320,7 +311,7 @@ def test_zones_client_mtls_env_auto( expected_client_cert_source = client_cert_source_callback patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -329,6 +320,7 @@ def test_zones_client_mtls_env_auto( client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) # Check the case client_cert_source and ADC client cert are not provided. @@ -341,7 +333,7 @@ def test_zones_client_mtls_env_auto( return_value=False, ): patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -350,6 +342,7 @@ def test_zones_client_mtls_env_auto( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -364,7 +357,7 @@ def test_zones_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -373,6 +366,7 @@ def test_zones_client_client_options_scopes( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -387,7 +381,7 @@ def test_zones_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(transport=transport_name, client_options=options) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -396,6 +390,7 @@ def test_zones_client_client_options_credentials_file( client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, ) @@ -404,9 +399,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetZoneRequest): credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: @@ -414,7 +409,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetZoneRequest): return_value = compute.Zone( available_cpu_platforms=["available_cpu_platforms_value"], creation_timestamp="creation_timestamp_value", - deprecated=compute.DeprecationStatus(deleted="deleted_value"), description="description_value", id=205, kind="kind_value", @@ -426,9 +420,9 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetZoneRequest): ) # Wrap the value into a proper Response obj - json_return_value = compute.Zone.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Zone.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.get(request) @@ -437,7 +431,6 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetZoneRequest): assert isinstance(response, compute.Zone) assert response.available_cpu_platforms == ["available_cpu_platforms_value"] assert response.creation_timestamp == "creation_timestamp_value" - assert response.deprecated == compute.DeprecationStatus(deleted="deleted_value") assert response.description == "description_value" assert response.id == 205 assert response.kind == "kind_value" @@ -448,12 +441,37 @@ def test_get_rest(transport: str = "rest", request_type=compute.GetZoneRequest): assert response.supports_pzs is True +def test_get_rest_bad_request( + transport: str = "rest", request_type=compute.GetZoneRequest +): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.get(request) + + def test_get_rest_from_dict(): test_get_rest(request_type=dict) -def test_get_rest_flattened(): - client = ZonesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened(transport: str = "rest"): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -461,30 +479,36 @@ def test_get_rest_flattened(): return_value = compute.Zone() # Wrap the value into a proper Response obj - json_return_value = compute.Zone.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.Zone.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.get( - project="project_value", zone="zone_value", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value", zone="zone_value",) + mock_args.update(sample_request) + client.get(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) - assert "zone_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones/{zone}" + % client.transport._host, + args[1], + ) -def test_get_rest_flattened_error(): - client = ZonesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_get_rest_flattened_error(transport: str = "rest"): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -499,28 +523,24 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListZonesReques credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # Designate an appropriate value for the returned response. return_value = compute.ZoneList( id="id_value", - items=[ - compute.Zone(available_cpu_platforms=["available_cpu_platforms_value"]) - ], kind="kind_value", next_page_token="next_page_token_value", self_link="self_link_value", - warning=compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED), ) # Wrap the value into a proper Response obj - json_return_value = compute.ZoneList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ZoneList.to_json(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value response = client.list(request) @@ -528,21 +548,42 @@ def test_list_rest(transport: str = "rest", request_type=compute.ListZonesReques # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListPager) assert response.id == "id_value" - assert response.items == [ - compute.Zone(available_cpu_platforms=["available_cpu_platforms_value"]) - ] assert response.kind == "kind_value" assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.warning == compute.Warning(code=compute.Warning.Code.CLEANUP_FAILED) + + +def test_list_rest_bad_request( + transport: str = "rest", request_type=compute.ListZonesRequest +): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with mock.patch.object(Session, "request") as req, pytest.raises( + core_exceptions.BadRequest + ): + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + client.list(request) def test_list_rest_from_dict(): test_list_rest(request_type=dict) -def test_list_rest_flattened(): - client = ZonesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened(transport: str = "rest"): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: @@ -550,27 +591,35 @@ def test_list_rest_flattened(): return_value = compute.ZoneList() # Wrap the value into a proper Response obj - json_return_value = compute.ZoneList.to_json(return_value) response_value = Response() response_value.status_code = 200 + json_return_value = compute.ZoneList.to_json(return_value) + response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list(project="project_value",) + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict(project="project_value",) + mock_args.update(sample_request) + client.list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 - _, http_call, http_params = req.mock_calls[0] - body = http_params.get("data") - params = http_params.get("params") - assert "project_value" in http_call[1] + str(body) + str(params) + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "https://%s/compute/v1/projects/{project}/zones" % client.transport._host, + args[1], + ) -def test_list_rest_flattened_error(): - client = ZonesClient(credentials=ga_credentials.AnonymousCredentials(),) +def test_list_rest_flattened_error(transport: str = "rest"): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport, + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -580,11 +629,13 @@ def test_list_rest_flattened_error(): ) -def test_list_pager(): +def test_list_rest_pager(): client = ZonesClient(credentials=ga_credentials.AnonymousCredentials(),) # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( compute.ZoneList( @@ -606,16 +657,15 @@ def test_list_pager(): return_val.status_code = 200 req.side_effect = return_values - metadata = () - pager = client.list(request={}) + sample_request = {"project": "sample1"} - assert pager._metadata == metadata + pager = client.list(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.Zone) for i in results) - pages = list(client.list(request={}).pages) + pages = list(client.list(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -697,8 +747,10 @@ def test_zones_base_transport(): with pytest.raises(NotImplementedError): getattr(transport, method)(request=object()) + with pytest.raises(NotImplementedError): + transport.close() + -@requires_google_auth_gte_1_25_0 def test_zones_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file with mock.patch.object( @@ -723,30 +775,6 @@ def test_zones_base_transport_with_credentials_file(): ) -@requires_google_auth_lt_1_25_0 -def test_zones_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.compute_v1.services.zones.transports.ZonesTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.ZonesTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id="octopus", - ) - - def test_zones_base_transport_with_adc(): # Test the default credentials are used if credentials and credentials_file are None. with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( @@ -758,7 +786,6 @@ def test_zones_base_transport_with_adc(): adc.assert_called_once() -@requires_google_auth_gte_1_25_0 def test_zones_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: @@ -775,22 +802,6 @@ def test_zones_auth_adc(): ) -@requires_google_auth_lt_1_25_0 -def test_zones_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - ZonesClient() - adc.assert_called_once_with( - scopes=( - "https://www.googleapis.com/auth/compute.readonly", - "https://www.googleapis.com/auth/compute", - "https://www.googleapis.com/auth/cloud-platform", - ), - quota_project_id=None, - ) - - def test_zones_http_transport_client_cert_source_for_mtls(): cred = ga_credentials.AnonymousCredentials() with mock.patch( @@ -933,3 +944,36 @@ def test_client_withDEFAULT_CLIENT_INFO(): credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, ) prep.assert_called_once_with(client_info) + + +def test_transport_close(): + transports = { + "rest": "_session", + } + + for transport, close_name in transports.items(): + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + with mock.patch.object( + type(getattr(client.transport, close_name)), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ZonesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called()
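The flattened-field tests in this diff no longer grep the serialized request body for field values; they validate the URL passed to Session.request against the method's http rule using google.api_core.path_template. A minimal, self-contained sketch of that check follows; the host and the sample project/zone values are placeholders for illustration, not values taken from the patch:

    from google.api_core import path_template

    # Template mirroring the Zones.Get http rule; "compute.example.com" is a placeholder host.
    template = "https://%s/compute/v1/projects/{project}/zones/{zone}" % "compute.example.com"

    # URL as the REST transport would build it from project="sample1", zone="sample2".
    url = "https://compute.example.com/compute/v1/projects/sample1/zones/sample2"

    # Each {variable} matches exactly one path segment, so the sample URL validates...
    assert path_template.validate(template, url)
    # ...while extra trailing segments do not.
    assert not path_template.validate(template, url + "/extra")

In the tests themselves the template is built from client.transport._host and compared against args[1] of the mocked Session.request call, which checks the transcoded path without depending on how query parameters or the body are encoded.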